commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b964c71509d1c562d4080a39bf5fc7333da39608 | fedora/__init__.py | fedora/__init__.py | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
from fedora import release
__version__ = release.VERSION
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
| # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
| Rearrange imports of gettext and release because of dependencies in circular import. | Rearrange imports of gettext and release because of dependencies in
circular import.
| Python | lgpl-2.1 | fedora-infra/python-fedora | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
from fedora import release
__version__ = release.VERSION
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
Rearrange imports of gettext and release because of dependencies in
circular import. | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
| <commit_before># This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
from fedora import release
__version__ = release.VERSION
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
<commit_msg>Rearrange imports of gettext and release because of dependencies in
circular import.<commit_after> | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
| # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
from fedora import release
__version__ = release.VERSION
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
Rearrange imports of gettext and release because of dependencies in
circular import.# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
| <commit_before># This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
from fedora import release
__version__ = release.VERSION
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
<commit_msg>Rearrange imports of gettext and release because of dependencies in
circular import.<commit_after># This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
|
f3735ed56bff6425eb87f492707b7a8a5ef3119a | touch/__init__.py | touch/__init__.py | from pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def touch_file(path, context):
content = context.get('article', context.get('page'))
if content and hasattr(content, 'date'):
mtime = time.mktime(content.date.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def register():
signals.content_written.connect(touch_file)
| from pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def set_file_utime(path, datetime):
mtime = time.mktime(datetime.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def touch_file(path, context):
content = context.get('article', context.get('page'))
page = context.get('articles_page')
dates = context.get('dates')
if content and hasattr(content, 'date'):
set_file_utime(path, content.date)
elif page:
set_file_utime(path, max(x.date for x in page.object_list))
elif dates:
set_file_utime(path, max(x.date for x in dates))
def register():
signals.content_written.connect(touch_file)
| Make touch plugin touch files with lists | Make touch plugin touch files with lists
| Python | agpl-3.0 | xsteadfastx/pelican-plugins,doctorwidget/pelican-plugins,lazycoder-ru/pelican-plugins,Neurita/pelican-plugins,lele1122/pelican-plugins,mitchins/pelican-plugins,jantman/pelican-plugins,samueljohn/pelican-plugins,MarkusH/pelican-plugins,jakevdp/pelican-plugins,florianjacob/pelican-plugins,danmackinlay/pelican-plugins,danmackinlay/pelican-plugins,publicus/pelican-plugins,if1live/pelican-plugins,publicus/pelican-plugins,clokep/pelican-plugins,pestrickland/pelican-plugins,pestrickland/pelican-plugins,andreas-h/pelican-plugins,UHBiocomputation/pelican-plugins,joachimneu/pelican-plugins,phrawzty/pelican-plugins,lazycoder-ru/pelican-plugins,farseerfc/pelican-plugins,benjaminabel/pelican-plugins,doctorwidget/pelican-plugins,mwcz/pelican-plugins,mwcz/pelican-plugins,xsteadfastx/pelican-plugins,makefu/pelican-plugins,kdheepak89/pelican-plugins,kdheepak89/pelican-plugins,xsteadfastx/pelican-plugins,florianjacob/pelican-plugins,FuzzJunket/pelican-plugins,Neurita/pelican-plugins,doctorwidget/pelican-plugins,jantman/pelican-plugins,FuzzJunket/pelican-plugins,if1live/pelican-plugins,prisae/pelican-plugins,clokep/pelican-plugins,florianjacob/pelican-plugins,andreas-h/pelican-plugins,kdheepak89/pelican-plugins,lele1122/pelican-plugins,goerz/pelican-plugins,andreas-h/pelican-plugins,mwcz/pelican-plugins,cmacmackin/pelican-plugins,pelson/pelican-plugins,mitchins/pelican-plugins,Xion/pelican-plugins,karya0/pelican-plugins,frickp/pelican-plugins,karya0/pelican-plugins,prisae/pelican-plugins,mortada/pelican-plugins,benjaminabel/pelican-plugins,makefu/pelican-plugins,olgabot/pelican-plugins,gw0/pelican-plugins,jakevdp/pelican-plugins,cmacmackin/pelican-plugins,Neurita/pelican-plugins,barrysteyn/pelican-plugins,lindzey/pelican-plugins,prisae/pelican-plugins,shireenrao/pelican-plugins,clokep/pelican-plugins,cmacmackin/pelican-plugins,gjreda/pelican-plugins,UHBiocomputation/pelican-plugins,Samael500/pelican-plugins,pxquim/pelican-plugins,prisae/pelican-plugins,Samael500/
pelican-plugins,wilsonfreitas/pelican-plugins,rlaboiss/pelican-plugins,MarkusH/pelican-plugins,Xion/pelican-plugins,goerz/pelican-plugins,amitsaha/pelican-plugins,pelson/pelican-plugins,UHBiocomputation/pelican-plugins,karya0/pelican-plugins,seandavi/pelican-plugins,joachimneu/pelican-plugins,lindzey/pelican-plugins,phrawzty/pelican-plugins,lazycoder-ru/pelican-plugins,wilsonfreitas/pelican-plugins,talha131/pelican-plugins,jakevdp/pelican-plugins,cctags/pelican-plugins,pxquim/pelican-plugins,ziaa/pelican-plugins,davidmarquis/pelican-plugins,mortada/pelican-plugins,mortada/pelican-plugins,gjreda/pelican-plugins,jprine/pelican-plugins,Samael500/pelican-plugins,ingwinlu/pelican-plugins,mikitex70/pelican-plugins,danmackinlay/pelican-plugins,makefu/pelican-plugins,danmackinlay/pelican-plugins,ingwinlu/pelican-plugins,yuanboshe/pelican-plugins,farseerfc/pelican-plugins,proteansec/pelican-plugins,mikitex70/pelican-plugins,proteansec/pelican-plugins,jantman/pelican-plugins,barrysteyn/pelican-plugins,lindzey/pelican-plugins,M157q/pelican-plugins,benjaminabel/pelican-plugins,karya0/pelican-plugins,gjreda/pelican-plugins,shireenrao/pelican-plugins,Neurita/pelican-plugins,if1live/pelican-plugins,seandavi/pelican-plugins,FuzzJunket/pelican-plugins,phrawzty/pelican-plugins,lindzey/pelican-plugins,publicus/pelican-plugins,yuanboshe/pelican-plugins,rlaboiss/pelican-plugins,makefu/pelican-plugins,clokep/pelican-plugins,shireenrao/pelican-plugins,samueljohn/pelican-plugins,jprine/pelican-plugins,barrysteyn/pelican-plugins,kdheepak89/pelican-plugins,jfosorio/pelican-plugins,benjaminabel/pelican-plugins,frickp/pelican-plugins,cmacmackin/pelican-plugins,M157q/pelican-plugins,if1live/pelican-plugins,gw0/pelican-plugins,olgabot/pelican-plugins,joachimneu/pelican-plugins,jfosorio/pelican-plugins,pxquim/pelican-plugins,lele1122/pelican-plugins,talha131/pelican-plugins,howthebodyworks/pelican-plugins,pestrickland/pelican-plugins,samueljohn/pelican-plugins,rlaboiss/pelican-plugins,ingwinlu/pe
lican-plugins,goerz/pelican-plugins,amitsaha/pelican-plugins,farseerfc/pelican-plugins,mwcz/pelican-plugins,Xion/pelican-plugins,lele1122/pelican-plugins,MarkusH/pelican-plugins,gjreda/pelican-plugins,seandavi/pelican-plugins,jcdubacq/pelican-plugins,mortada/pelican-plugins,amitsaha/pelican-plugins,rlaboiss/pelican-plugins,mikitex70/pelican-plugins,jfosorio/pelican-plugins,seandavi/pelican-plugins,howthebodyworks/pelican-plugins,andreas-h/pelican-plugins,M157q/pelican-plugins,mortada/pelican-plugins,jcdubacq/pelican-plugins,yuanboshe/pelican-plugins,mitchins/pelican-plugins,cctags/pelican-plugins,cctags/pelican-plugins,goerz/pelican-plugins,howthebodyworks/pelican-plugins,proteansec/pelican-plugins,mikitex70/pelican-plugins,lazycoder-ru/pelican-plugins,ziaa/pelican-plugins,MarkusH/pelican-plugins,doctorwidget/pelican-plugins,davidmarquis/pelican-plugins,talha131/pelican-plugins,joachimneu/pelican-plugins,phrawzty/pelican-plugins,florianjacob/pelican-plugins,Samael500/pelican-plugins,ingwinlu/pelican-plugins,frickp/pelican-plugins,jakevdp/pelican-plugins,wilsonfreitas/pelican-plugins,MarkusH/pelican-plugins,barrysteyn/pelican-plugins,pelson/pelican-plugins,talha131/pelican-plugins,pelson/pelican-plugins,Xion/pelican-plugins,mitchins/pelican-plugins,FuzzJunket/pelican-plugins,amitsaha/pelican-plugins,shireenrao/pelican-plugins,pxquim/pelican-plugins,cctags/pelican-plugins,UHBiocomputation/pelican-plugins,jfosorio/pelican-plugins,howthebodyworks/pelican-plugins,jantman/pelican-plugins,farseerfc/pelican-plugins,farseerfc/pelican-plugins,ziaa/pelican-plugins,wilsonfreitas/pelican-plugins,xsteadfastx/pelican-plugins,davidmarquis/pelican-plugins,samueljohn/pelican-plugins,frickp/pelican-plugins,yuanboshe/pelican-plugins,olgabot/pelican-plugins,ziaa/pelican-plugins,olgabot/pelican-plugins,davidmarquis/pelican-plugins,talha131/pelican-plugins,publicus/pelican-plugins,proteansec/pelican-plugins,pestrickland/pelican-plugins,M157q/pelican-plugins | from pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def touch_file(path, context):
content = context.get('article', context.get('page'))
if content and hasattr(content, 'date'):
mtime = time.mktime(content.date.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def register():
signals.content_written.connect(touch_file)
Make touch plugin touch files with lists | from pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def set_file_utime(path, datetime):
mtime = time.mktime(datetime.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def touch_file(path, context):
content = context.get('article', context.get('page'))
page = context.get('articles_page')
dates = context.get('dates')
if content and hasattr(content, 'date'):
set_file_utime(path, content.date)
elif page:
set_file_utime(path, max(x.date for x in page.object_list))
elif dates:
set_file_utime(path, max(x.date for x in dates))
def register():
signals.content_written.connect(touch_file)
| <commit_before>from pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def touch_file(path, context):
content = context.get('article', context.get('page'))
if content and hasattr(content, 'date'):
mtime = time.mktime(content.date.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def register():
signals.content_written.connect(touch_file)
<commit_msg>Make touch plugin touch files with lists<commit_after> | from pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def set_file_utime(path, datetime):
mtime = time.mktime(datetime.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def touch_file(path, context):
content = context.get('article', context.get('page'))
page = context.get('articles_page')
dates = context.get('dates')
if content and hasattr(content, 'date'):
set_file_utime(path, content.date)
elif page:
set_file_utime(path, max(x.date for x in page.object_list))
elif dates:
set_file_utime(path, max(x.date for x in dates))
def register():
signals.content_written.connect(touch_file)
| from pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def touch_file(path, context):
content = context.get('article', context.get('page'))
if content and hasattr(content, 'date'):
mtime = time.mktime(content.date.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def register():
signals.content_written.connect(touch_file)
Make touch plugin touch files with listsfrom pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def set_file_utime(path, datetime):
mtime = time.mktime(datetime.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def touch_file(path, context):
content = context.get('article', context.get('page'))
page = context.get('articles_page')
dates = context.get('dates')
if content and hasattr(content, 'date'):
set_file_utime(path, content.date)
elif page:
set_file_utime(path, max(x.date for x in page.object_list))
elif dates:
set_file_utime(path, max(x.date for x in dates))
def register():
signals.content_written.connect(touch_file)
| <commit_before>from pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def touch_file(path, context):
content = context.get('article', context.get('page'))
if content and hasattr(content, 'date'):
mtime = time.mktime(content.date.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def register():
signals.content_written.connect(touch_file)
<commit_msg>Make touch plugin touch files with lists<commit_after>from pelican import signals
import logging
import os
import time
logger = logging.getLogger(__name__)
def set_file_utime(path, datetime):
mtime = time.mktime(datetime.timetuple())
logger.info('touching %s', path)
os.utime(path, (mtime, mtime))
def touch_file(path, context):
content = context.get('article', context.get('page'))
page = context.get('articles_page')
dates = context.get('dates')
if content and hasattr(content, 'date'):
set_file_utime(path, content.date)
elif page:
set_file_utime(path, max(x.date for x in page.object_list))
elif dates:
set_file_utime(path, max(x.date for x in dates))
def register():
signals.content_written.connect(touch_file)
|
f4d5bafcf99d2117fe589d8c31f8aff8ed3467a5 | RefreshScripts.py | RefreshScripts.py | #from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
checkNewSubmissions()
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts() | ### exclam /usr/bin/env python3
#from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
try:
checkNewSubmissions()
except Exception as e:
#traceback.print_exc()
print("Found error, skipping this loop. ")
print(str(e))
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts() | Refresh script now displays error message | Refresh script now displays error message
| Python | mit | LiquidFun/Reddit-GeoGuessr-Tracking-Bot | #from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
checkNewSubmissions()
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts()Refresh script now displays error message | ### exclam /usr/bin/env python3
#from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
try:
checkNewSubmissions()
except Exception as e:
#traceback.print_exc()
print("Found error, skipping this loop. ")
print(str(e))
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts() | <commit_before>#from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
checkNewSubmissions()
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts()<commit_msg>Refresh script now displays error message<commit_after> | ### exclam /usr/bin/env python3
#from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
try:
checkNewSubmissions()
except Exception as e:
#traceback.print_exc()
print("Found error, skipping this loop. ")
print(str(e))
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts() | #from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
checkNewSubmissions()
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts()Refresh script now displays error message### exclam /usr/bin/env python3
#from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
try:
checkNewSubmissions()
except Exception as e:
#traceback.print_exc()
print("Found error, skipping this loop. ")
print(str(e))
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts() | <commit_before>#from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
checkNewSubmissions()
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts()<commit_msg>Refresh script now displays error message<commit_after>### exclam /usr/bin/env python3
#from CreateTableFromDatabase import getRankingsFromDatabase
import time
from CheckAndPostForSeriesSubmissions import checkNewSubmissions
# Refreshes all other scripts every couple of minutes
def refreshScripts():
while True:
try:
checkNewSubmissions()
except Exception as e:
#traceback.print_exc()
print("Found error, skipping this loop. ")
print(str(e))
timeToSleep = 900
print("Sleeping for " + str(timeToSleep / 60) + " minutes.")
time.sleep(timeToSleep)
print("")
if __name__ == '__main__':
refreshScripts() |
e881465050ef9edbf2b47071b1fa2fc27ac26c1a | tests/Settings/TestExtruderStack.py | tests/Settings/TestExtruderStack.py | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock()) | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
#############################START OF TEST CASES################################
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock()) | Add delimiter between global stuff and test cases | Add delimiter between global stuff and test cases
Helps provide some oversight since this module is about to explode in size.
Contributes to issue CURA-3497.
| Python | agpl-3.0 | hmflash/Cura,ynotstartups/Wanhao,Curahelper/Cura,ynotstartups/Wanhao,hmflash/Cura,fieldOfView/Cura,fieldOfView/Cura,Curahelper/Cura | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())Add delimiter between global stuff and test cases
Helps provide some oversight since this module is about to explode in size.
Contributes to issue CURA-3497. | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
#  NOTE(review): this returns the ``ExtruderStack`` *class* object, not an
#  instance — the return annotation and the "empty extruder stack" wording
#  suggest an instance was intended (missing ``()``?). The constructor's
#  required arguments are not visible here, so confirm before changing.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
    return cura.Settings.ExtruderStack.ExtruderStack
#############################START OF TEST CASES################################
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock()) | <commit_before># Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())<commit_msg>Add delimiter between global stuff and test cases
Helps provide some oversight since this module is about to explode in size.
Contributes to issue CURA-3497.<commit_after> | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
#############################START OF TEST CASES################################
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock()) | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())Add delimiter between global stuff and test cases
Helps provide some oversight since this module is about to explode in size.
Contributes to issue CURA-3497.# Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
#############################START OF TEST CASES################################
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock()) | <commit_before># Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock())<commit_msg>Add delimiter between global stuff and test cases
Helps provide some oversight since this module is about to explode in size.
Contributes to issue CURA-3497.<commit_after># Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import pytest #This module contains automated tests.
import unittest.mock #For the mocking and monkeypatching functionality.
import cura.Settings.ExtruderStack #The module we're testing.
from cura.Settings.Exceptions import InvalidOperationError #To check whether the correct exceptions are raised.
## An empty extruder stack to test with.
@pytest.fixture()
def extruder_stack() -> cura.Settings.ExtruderStack.ExtruderStack:
return cura.Settings.ExtruderStack.ExtruderStack
#############################START OF TEST CASES################################
## Tests whether adding a container is properly forbidden.
def test_addContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.addContainer(unittest.mock.MagicMock())
## Tests whether inserting a container is properly forbidden.
def test_insertContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.insertContainer(0, unittest.mock.MagicMock())
## Tests whether removing a container is properly forbidden.
def test_removeContainer(extruder_stack):
with pytest.raises(InvalidOperationError):
extruder_stack.removeContainer(unittest.mock.MagicMock()) |
1d1348eb2126a0a8ee1a18b37a5254b59c3a4c76 | examples/ensemble/plot_forest_importances_faces.py | examples/ensemble/plot_forest_importances_faces.py | """
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0)
forest.fit(X, y)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
| """
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
from time import time
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Number of cores to use to perform parallel fitting of the forest model
n_jobs=2
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
print "Fitting ExtraTreesClassifier on faces data with %d cores..." % n_jobs
t0 = time()
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0, n_jobs=n_jobs)
forest.fit(X, y)
print "done in %0.3fs" % (time() - t0)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
| Update random forest face example to use several cores | Update random forest face example to use several cores
| Python | bsd-3-clause | scikit-learn/scikit-learn,madjelan/scikit-learn,xubenben/scikit-learn,Vimos/scikit-learn,nrhine1/scikit-learn,zhenv5/scikit-learn,liberatorqjw/scikit-learn,ndingwall/scikit-learn,beepee14/scikit-learn,shyamalschandra/scikit-learn,walterreade/scikit-learn,dhruv13J/scikit-learn,Adai0808/scikit-learn,Nyker510/scikit-learn,huobaowangxi/scikit-learn,hrjn/scikit-learn,IssamLaradji/scikit-learn,rahul-c1/scikit-learn,f3r/scikit-learn,qifeigit/scikit-learn,xubenben/scikit-learn,nmayorov/scikit-learn,pkruskal/scikit-learn,mjudsp/Tsallis,imaculate/scikit-learn,procoder317/scikit-learn,costypetrisor/scikit-learn,henrykironde/scikit-learn,aflaxman/scikit-learn,murali-munna/scikit-learn,yyjiang/scikit-learn,andaag/scikit-learn,akionakamura/scikit-learn,arjoly/scikit-learn,procoder317/scikit-learn,nesterione/scikit-learn,AlexRobson/scikit-learn,ChanderG/scikit-learn,chrisburr/scikit-learn,OshynSong/scikit-learn,elkingtonmcb/scikit-learn,pypot/scikit-learn,ndingwall/scikit-learn,quheng/scikit-learn,eickenberg/scikit-learn,michigraber/scikit-learn,eickenberg/scikit-learn,eg-zhang/scikit-learn,mayblue9/scikit-learn,ahoyosid/scikit-learn,stylianos-kampakis/scikit-learn,bthirion/scikit-learn,larsmans/scikit-learn,pv/scikit-learn,mjgrav2001/scikit-learn,fabianp/scikit-learn,ZenDevelopmentSystems/scikit-learn,lesteve/scikit-learn,RayMick/scikit-learn,xyguo/scikit-learn,jjx02230808/project0223,ldirer/scikit-learn,clemkoa/scikit-learn,potash/scikit-learn,466152112/scikit-learn,betatim/scikit-learn,ngoix/OCRF,hainm/scikit-learn,russel1237/scikit-learn,ChanChiChoi/scikit-learn,shikhardb/scikit-learn,xyguo/scikit-learn,xuewei4d/scikit-learn,icdishb/scikit-learn,CVML/scikit-learn,joshloyal/scikit-learn,loli/sklearn-ensembletrees,sanketloke/scikit-learn,ycaihua/scikit-learn,adamgreenhall/scikit-learn,hainm/scikit-learn,abhishekgahlot/scikit-learn,fabianp/scikit-learn,maheshakya/scikit-learn,wanggang3333/scikit-learn,aabadie/scikit-learn,trungnt13/scikit-learn,xwolf12/s
cikit-learn,elkingtonmcb/scikit-learn,arabenjamin/scikit-learn,jzt5132/scikit-learn,MartinDelzant/scikit-learn,0asa/scikit-learn,pythonvietnam/scikit-learn,DonBeo/scikit-learn,smartscheduling/scikit-learn-categorical-tree,walterreade/scikit-learn,rexshihaoren/scikit-learn,depet/scikit-learn,RPGOne/scikit-learn,adamgreenhall/scikit-learn,AlexandreAbraham/scikit-learn,trungnt13/scikit-learn,simon-pepin/scikit-learn,LiaoPan/scikit-learn,larsmans/scikit-learn,aminert/scikit-learn,arabenjamin/scikit-learn,hsiaoyi0504/scikit-learn,jorik041/scikit-learn,eickenberg/scikit-learn,trankmichael/scikit-learn,imaculate/scikit-learn,plissonf/scikit-learn,mattilyra/scikit-learn,ky822/scikit-learn,0x0all/scikit-learn,tdhopper/scikit-learn,Adai0808/scikit-learn,themrmax/scikit-learn,hdmetor/scikit-learn,mhdella/scikit-learn,ankurankan/scikit-learn,r-mart/scikit-learn,henridwyer/scikit-learn,q1ang/scikit-learn,yask123/scikit-learn,arabenjamin/scikit-learn,mwv/scikit-learn,Vimos/scikit-learn,0x0all/scikit-learn,glemaitre/scikit-learn,jorge2703/scikit-learn,betatim/scikit-learn,mayblue9/scikit-learn,shusenl/scikit-learn,B3AU/waveTree,equialgo/scikit-learn,PatrickOReilly/scikit-learn,marcocaccin/scikit-learn,appapantula/scikit-learn,MechCoder/scikit-learn,pianomania/scikit-learn,MartinSavc/scikit-learn,LiaoPan/scikit-learn,466152112/scikit-learn,arahuja/scikit-learn,mjudsp/Tsallis,rohanp/scikit-learn,tmhm/scikit-learn,PatrickChrist/scikit-learn,jaidevd/scikit-learn,xyguo/scikit-learn,victorbergelin/scikit-learn,ChanChiChoi/scikit-learn,petosegan/scikit-learn,CforED/Machine-Learning,mattilyra/scikit-learn,aetilley/scikit-learn,joernhees/scikit-learn,roxyboy/scikit-learn,zaxtax/scikit-learn,r-mart/scikit-learn,maheshakya/scikit-learn,ilyes14/scikit-learn,NunoEdgarGub1/scikit-learn,zihua/scikit-learn,ningchi/scikit-learn,glennq/scikit-learn,shenzebang/scikit-learn,tomlof/scikit-learn,loli/semisupervisedforests,jakobworldpeace/scikit-learn,rrohan/scikit-learn,sergeyf/scikit-learn,equialgo/sc
ikit-learn,yunfeilu/scikit-learn,macks22/scikit-learn,JPFrancoia/scikit-learn,olologin/scikit-learn,deepesch/scikit-learn,belltailjp/scikit-learn,nesterione/scikit-learn,rahul-c1/scikit-learn,ltiao/scikit-learn,abhishekgahlot/scikit-learn,hitszxp/scikit-learn,AIML/scikit-learn,samzhang111/scikit-learn,kylerbrown/scikit-learn,PrashntS/scikit-learn,Fireblend/scikit-learn,ltiao/scikit-learn,krez13/scikit-learn,maheshakya/scikit-learn,ephes/scikit-learn,kjung/scikit-learn,MohammedWasim/scikit-learn,khkaminska/scikit-learn,imaculate/scikit-learn,michigraber/scikit-learn,RPGOne/scikit-learn,xiaoxiamii/scikit-learn,chrisburr/scikit-learn,jaidevd/scikit-learn,vortex-ape/scikit-learn,deepesch/scikit-learn,UNR-AERIAL/scikit-learn,jorik041/scikit-learn,henridwyer/scikit-learn,frank-tancf/scikit-learn,henrykironde/scikit-learn,Barmaley-exe/scikit-learn,MohammedWasim/scikit-learn,ElDeveloper/scikit-learn,evgchz/scikit-learn,aflaxman/scikit-learn,andaag/scikit-learn,marcocaccin/scikit-learn,alvarofierroclavero/scikit-learn,mattgiguere/scikit-learn,imaculate/scikit-learn,sanketloke/scikit-learn,Djabbz/scikit-learn,kashif/scikit-learn,appapantula/scikit-learn,samzhang111/scikit-learn,fbagirov/scikit-learn,f3r/scikit-learn,jjx02230808/project0223,anntzer/scikit-learn,theoryno3/scikit-learn,kylerbrown/scikit-learn,etkirsch/scikit-learn,costypetrisor/scikit-learn,kagayakidan/scikit-learn,cybernet14/scikit-learn,ssaeger/scikit-learn,jereze/scikit-learn,frank-tancf/scikit-learn,xwolf12/scikit-learn,amueller/scikit-learn,waterponey/scikit-learn,shahankhatch/scikit-learn,toastedcornflakes/scikit-learn,bikong2/scikit-learn,ky822/scikit-learn,Fireblend/scikit-learn,mrshu/scikit-learn,mhdella/scikit-learn,samzhang111/scikit-learn,fabianp/scikit-learn,davidgbe/scikit-learn,aetilley/scikit-learn,terkkila/scikit-learn,nmayorov/scikit-learn,mfjb/scikit-learn,jmschrei/scikit-learn,costypetrisor/scikit-learn,samuel1208/scikit-learn,gclenaghan/scikit-learn,zorroblue/scikit-learn,fabioticconi/scikit
-learn,shangwuhencc/scikit-learn,spallavolu/scikit-learn,olologin/scikit-learn,pnedunuri/scikit-learn,mrshu/scikit-learn,arjoly/scikit-learn,thientu/scikit-learn,gotomypc/scikit-learn,JPFrancoia/scikit-learn,MohammedWasim/scikit-learn,fredhusser/scikit-learn,henridwyer/scikit-learn,ChanChiChoi/scikit-learn,kylerbrown/scikit-learn,hitszxp/scikit-learn,dingocuster/scikit-learn,jereze/scikit-learn,jereze/scikit-learn,Srisai85/scikit-learn,ZENGXH/scikit-learn,jmetzen/scikit-learn,cybernet14/scikit-learn,aabadie/scikit-learn,xwolf12/scikit-learn,vermouthmjl/scikit-learn,frank-tancf/scikit-learn,stylianos-kampakis/scikit-learn,jlegendary/scikit-learn,RPGOne/scikit-learn,glennq/scikit-learn,shikhardb/scikit-learn,lenovor/scikit-learn,theoryno3/scikit-learn,ankurankan/scikit-learn,trankmichael/scikit-learn,shyamalschandra/scikit-learn,voxlol/scikit-learn,quheng/scikit-learn,xuewei4d/scikit-learn,spallavolu/scikit-learn,liberatorqjw/scikit-learn,DonBeo/scikit-learn,harshaneelhg/scikit-learn,ishanic/scikit-learn,mattgiguere/scikit-learn,mwv/scikit-learn,lucidfrontier45/scikit-learn,cybernet14/scikit-learn,Clyde-fare/scikit-learn,robin-lai/scikit-learn,vibhorag/scikit-learn,vivekmishra1991/scikit-learn,vermouthmjl/scikit-learn,ZenDevelopmentSystems/scikit-learn,michigraber/scikit-learn,robin-lai/scikit-learn,zuku1985/scikit-learn,ankurankan/scikit-learn,Obus/scikit-learn,anirudhjayaraman/scikit-learn,rahuldhote/scikit-learn,fabioticconi/scikit-learn,evgchz/scikit-learn,waterponey/scikit-learn,MatthieuBizien/scikit-learn,mjgrav2001/scikit-learn,terkkila/scikit-learn,vshtanko/scikit-learn,evgchz/scikit-learn,hainm/scikit-learn,Titan-C/scikit-learn,treycausey/scikit-learn,schets/scikit-learn,harshaneelhg/scikit-learn,murali-munna/scikit-learn,xuewei4d/scikit-learn,hainm/scikit-learn,Vimos/scikit-learn,mblondel/scikit-learn,YinongLong/scikit-learn,HolgerPeters/scikit-learn,cdegroc/scikit-learn,mjgrav2001/scikit-learn,krez13/scikit-learn,espg/scikit-learn,3manuek/scikit-learn,henri
dwyer/scikit-learn,jmschrei/scikit-learn,siutanwong/scikit-learn,cl4rke/scikit-learn,cwu2011/scikit-learn,yyjiang/scikit-learn,fredhusser/scikit-learn,Jimmy-Morzaria/scikit-learn,carrillo/scikit-learn,massmutual/scikit-learn,loli/sklearn-ensembletrees,ilyes14/scikit-learn,jzt5132/scikit-learn,MartinSavc/scikit-learn,wzbozon/scikit-learn,aewhatley/scikit-learn,pianomania/scikit-learn,Achuth17/scikit-learn,ldirer/scikit-learn,mojoboss/scikit-learn,manhhomienbienthuy/scikit-learn,xwolf12/scikit-learn,nomadcube/scikit-learn,nhejazi/scikit-learn,sanketloke/scikit-learn,kaichogami/scikit-learn,sgenoud/scikit-learn,thientu/scikit-learn,procoder317/scikit-learn,Aasmi/scikit-learn,stylianos-kampakis/scikit-learn,AnasGhrab/scikit-learn,andrewnc/scikit-learn,Garrett-R/scikit-learn,mehdidc/scikit-learn,hsiaoyi0504/scikit-learn,manashmndl/scikit-learn,mikebenfield/scikit-learn,Nyker510/scikit-learn,fzalkow/scikit-learn,AlexanderFabisch/scikit-learn,espg/scikit-learn,robbymeals/scikit-learn,PatrickOReilly/scikit-learn,djgagne/scikit-learn,RayMick/scikit-learn,russel1237/scikit-learn,tmhm/scikit-learn,xzh86/scikit-learn,pythonvietnam/scikit-learn,shahankhatch/scikit-learn,rvraghav93/scikit-learn,MechCoder/scikit-learn,AlexRobson/scikit-learn,devanshdalal/scikit-learn,abhishekgahlot/scikit-learn,lbishal/scikit-learn,abhishekkrthakur/scikit-learn,madjelan/scikit-learn,themrmax/scikit-learn,thilbern/scikit-learn,alexeyum/scikit-learn,mfjb/scikit-learn,billy-inn/scikit-learn,ElDeveloper/scikit-learn,justincassidy/scikit-learn,anurag313/scikit-learn,jmetzen/scikit-learn,luo66/scikit-learn,pkruskal/scikit-learn,Titan-C/scikit-learn,jorik041/scikit-learn,gclenaghan/scikit-learn,manashmndl/scikit-learn,RachitKansal/scikit-learn,mxjl620/scikit-learn,marcocaccin/scikit-learn,icdishb/scikit-learn,IshankGulati/scikit-learn,tawsifkhan/scikit-learn,vortex-ape/scikit-learn,mwv/scikit-learn,JosmanPS/scikit-learn,pkruskal/scikit-learn,mojoboss/scikit-learn,alexeyum/scikit-learn,vinayak-mehta/sciki
t-learn,IssamLaradji/scikit-learn,mattilyra/scikit-learn,abhishekkrthakur/scikit-learn,glouppe/scikit-learn,ldirer/scikit-learn,depet/scikit-learn,Clyde-fare/scikit-learn,bthirion/scikit-learn,kmike/scikit-learn,nrhine1/scikit-learn,iismd17/scikit-learn,voxlol/scikit-learn,Adai0808/scikit-learn,rajat1994/scikit-learn,poryfly/scikit-learn,jlegendary/scikit-learn,tawsifkhan/scikit-learn,UNR-AERIAL/scikit-learn,mattgiguere/scikit-learn,djgagne/scikit-learn,AnasGhrab/scikit-learn,sgenoud/scikit-learn,MartinSavc/scikit-learn,rexshihaoren/scikit-learn,mjudsp/Tsallis,adamgreenhall/scikit-learn,mugizico/scikit-learn,sarahgrogan/scikit-learn,shusenl/scikit-learn,zihua/scikit-learn,sanketloke/scikit-learn,yanlend/scikit-learn,Akshay0724/scikit-learn,akionakamura/scikit-learn,fengzhyuan/scikit-learn,victorbergelin/scikit-learn,ngoix/OCRF,rishikksh20/scikit-learn,lenovor/scikit-learn,tomlof/scikit-learn,MatthieuBizien/scikit-learn,rohanp/scikit-learn,hlin117/scikit-learn,untom/scikit-learn,jorge2703/scikit-learn,phdowling/scikit-learn,russel1237/scikit-learn,ahoyosid/scikit-learn,mlyundin/scikit-learn,yunfeilu/scikit-learn,pv/scikit-learn,potash/scikit-learn,billy-inn/scikit-learn,fbagirov/scikit-learn,fzalkow/scikit-learn,bhargav/scikit-learn,jpautom/scikit-learn,jkarnows/scikit-learn,JosmanPS/scikit-learn,nmayorov/scikit-learn,sarahgrogan/scikit-learn,Windy-Ground/scikit-learn,qifeigit/scikit-learn,shyamalschandra/scikit-learn,clemkoa/scikit-learn,ishanic/scikit-learn,rsivapr/scikit-learn,liangz0707/scikit-learn,rexshihaoren/scikit-learn,nelson-liu/scikit-learn,davidgbe/scikit-learn,zuku1985/scikit-learn,tosolveit/scikit-learn,fyffyt/scikit-learn,zuku1985/scikit-learn,btabibian/scikit-learn,pnedunuri/scikit-learn,mjgrav2001/scikit-learn,nomadcube/scikit-learn,Clyde-fare/scikit-learn,ClimbsRocks/scikit-learn,moutai/scikit-learn,florian-f/sklearn,eickenberg/scikit-learn,gotomypc/scikit-learn,bigdataelephants/scikit-learn,jzt5132/scikit-learn,meduz/scikit-learn,jm-begon/scikit-l
earn,raghavrv/scikit-learn,aetilley/scikit-learn,pnedunuri/scikit-learn,mattilyra/scikit-learn,jpautom/scikit-learn,lesteve/scikit-learn,sarahgrogan/scikit-learn,RachitKansal/scikit-learn,pypot/scikit-learn,pypot/scikit-learn,466152112/scikit-learn,ogrisel/scikit-learn,depet/scikit-learn,roxyboy/scikit-learn,zuku1985/scikit-learn,wazeerzulfikar/scikit-learn,Obus/scikit-learn,scikit-learn/scikit-learn,tmhm/scikit-learn,pompiduskus/scikit-learn,mikebenfield/scikit-learn,amueller/scikit-learn,pv/scikit-learn,cainiaocome/scikit-learn,henrykironde/scikit-learn,pompiduskus/scikit-learn,abimannans/scikit-learn,jblackburne/scikit-learn,hitszxp/scikit-learn,Jimmy-Morzaria/scikit-learn,meduz/scikit-learn,NelisVerhoef/scikit-learn,jm-begon/scikit-learn,AlexRobson/scikit-learn,lazywei/scikit-learn,xzh86/scikit-learn,mattgiguere/scikit-learn,arjoly/scikit-learn,etkirsch/scikit-learn,tawsifkhan/scikit-learn,hitszxp/scikit-learn,wanggang3333/scikit-learn,chrsrds/scikit-learn,huzq/scikit-learn,Garrett-R/scikit-learn,elkingtonmcb/scikit-learn,liangz0707/scikit-learn,AlexandreAbraham/scikit-learn,loli/sklearn-ensembletrees,ycaihua/scikit-learn,chrsrds/scikit-learn,xavierwu/scikit-learn,anirudhjayaraman/scikit-learn,ycaihua/scikit-learn,wzbozon/scikit-learn,sonnyhu/scikit-learn,mhue/scikit-learn,NunoEdgarGub1/scikit-learn,ilo10/scikit-learn,hdmetor/scikit-learn,pv/scikit-learn,aminert/scikit-learn,mayblue9/scikit-learn,sumspr/scikit-learn,bnaul/scikit-learn,0x0all/scikit-learn,PatrickChrist/scikit-learn,treycausey/scikit-learn,rvraghav93/scikit-learn,Aasmi/scikit-learn,hsuantien/scikit-learn,yanlend/scikit-learn,aflaxman/scikit-learn,schets/scikit-learn,dsullivan7/scikit-learn,nelson-liu/scikit-learn,sonnyhu/scikit-learn,alexeyum/scikit-learn,pnedunuri/scikit-learn,OshynSong/scikit-learn,joshloyal/scikit-learn,LohithBlaze/scikit-learn,arahuja/scikit-learn,belltailjp/scikit-learn,lucidfrontier45/scikit-learn,ZENGXH/scikit-learn,dingocuster/scikit-learn,IndraVikas/scikit-learn,pompidusk
us/scikit-learn,nelson-liu/scikit-learn,alexsavio/scikit-learn,ssaeger/scikit-learn,altairpearl/scikit-learn,anurag313/scikit-learn,fengzhyuan/scikit-learn,luo66/scikit-learn,yyjiang/scikit-learn,LohithBlaze/scikit-learn,vybstat/scikit-learn,vivekmishra1991/scikit-learn,kmike/scikit-learn,toastedcornflakes/scikit-learn,bikong2/scikit-learn,zorroblue/scikit-learn,Garrett-R/scikit-learn,sgenoud/scikit-learn,carrillo/scikit-learn,bthirion/scikit-learn,frank-tancf/scikit-learn,tosolveit/scikit-learn,pratapvardhan/scikit-learn,bikong2/scikit-learn,LiaoPan/scikit-learn,gclenaghan/scikit-learn,Fireblend/scikit-learn,sinhrks/scikit-learn,TomDLT/scikit-learn,chrisburr/scikit-learn,ClimbsRocks/scikit-learn,ashhher3/scikit-learn,ivannz/scikit-learn,ivannz/scikit-learn,iismd17/scikit-learn,altairpearl/scikit-learn,ilyes14/scikit-learn,deepesch/scikit-learn,PatrickOReilly/scikit-learn,florian-f/sklearn,jakobworldpeace/scikit-learn,giorgiop/scikit-learn,0asa/scikit-learn,3manuek/scikit-learn,Srisai85/scikit-learn,ankurankan/scikit-learn,bikong2/scikit-learn,alvarofierroclavero/scikit-learn,manashmndl/scikit-learn,lazywei/scikit-learn,dsullivan7/scikit-learn,anirudhjayaraman/scikit-learn,phdowling/scikit-learn,untom/scikit-learn,altairpearl/scikit-learn,JsNoNo/scikit-learn,rohanp/scikit-learn,sumspr/scikit-learn,xiaoxiamii/scikit-learn,hlin117/scikit-learn,hugobowne/scikit-learn,appapantula/scikit-learn,ogrisel/scikit-learn,huzq/scikit-learn,akionakamura/scikit-learn,vivekmishra1991/scikit-learn,beepee14/scikit-learn,JsNoNo/scikit-learn,kmike/scikit-learn,vigilv/scikit-learn,ilo10/scikit-learn,Vimos/scikit-learn,hsiaoyi0504/scikit-learn,Djabbz/scikit-learn,zorroblue/scikit-learn,moutai/scikit-learn,aewhatley/scikit-learn,betatim/scikit-learn,depet/scikit-learn,sergeyf/scikit-learn,CforED/Machine-Learning,h2educ/scikit-learn,glennq/scikit-learn,xyguo/scikit-learn,eg-zhang/scikit-learn,heli522/scikit-learn,vybstat/scikit-learn,HolgerPeters/scikit-learn,fzalkow/scikit-learn,rishikksh
20/scikit-learn,harshaneelhg/scikit-learn,jlegendary/scikit-learn,ssaeger/scikit-learn,victorbergelin/scikit-learn,giorgiop/scikit-learn,glouppe/scikit-learn,vybstat/scikit-learn,mlyundin/scikit-learn,ephes/scikit-learn,rexshihaoren/scikit-learn,Srisai85/scikit-learn,liyu1990/sklearn,MartinDelzant/scikit-learn,nikitasingh981/scikit-learn,equialgo/scikit-learn,mayblue9/scikit-learn,cainiaocome/scikit-learn,yanlend/scikit-learn,kylerbrown/scikit-learn,robbymeals/scikit-learn,alexsavio/scikit-learn,pratapvardhan/scikit-learn,voxlol/scikit-learn,murali-munna/scikit-learn,andaag/scikit-learn,saiwing-yeung/scikit-learn,sonnyhu/scikit-learn,ZenDevelopmentSystems/scikit-learn,jseabold/scikit-learn,treycausey/scikit-learn,kashif/scikit-learn,jorik041/scikit-learn,fabianp/scikit-learn,wanggang3333/scikit-learn,h2educ/scikit-learn,hlin117/scikit-learn,dsullivan7/scikit-learn,spallavolu/scikit-learn,krez13/scikit-learn,AlexRobson/scikit-learn,tomlof/scikit-learn,nvoron23/scikit-learn,Clyde-fare/scikit-learn,RomainBrault/scikit-learn,kmike/scikit-learn,Lawrence-Liu/scikit-learn,petosegan/scikit-learn,IssamLaradji/scikit-learn,r-mart/scikit-learn,hlin117/scikit-learn,TomDLT/scikit-learn,manhhomienbienthuy/scikit-learn,chrisburr/scikit-learn,kaichogami/scikit-learn,ashhher3/scikit-learn,shahankhatch/scikit-learn,wlamond/scikit-learn,Sentient07/scikit-learn,rahul-c1/scikit-learn,lucidfrontier45/scikit-learn,ssaeger/scikit-learn,DSLituiev/scikit-learn,trungnt13/scikit-learn,pythonvietnam/scikit-learn,icdishb/scikit-learn,idlead/scikit-learn,yanlend/scikit-learn,aewhatley/scikit-learn,Adai0808/scikit-learn,Obus/scikit-learn,rsivapr/scikit-learn,jmschrei/scikit-learn,zihua/scikit-learn,maheshakya/scikit-learn,nhejazi/scikit-learn,MatthieuBizien/scikit-learn,jblackburne/scikit-learn,ngoix/OCRF,simon-pepin/scikit-learn,herilalaina/scikit-learn,xubenben/scikit-learn,Windy-Ground/scikit-learn,heli522/scikit-learn,jayflo/scikit-learn,jseabold/scikit-learn,quheng/scikit-learn,yask123/scikit
-learn,lbishal/scikit-learn,harshaneelhg/scikit-learn,rvraghav93/scikit-learn,jaidevd/scikit-learn,vinayak-mehta/scikit-learn,yunfeilu/scikit-learn,yonglehou/scikit-learn,scikit-learn/scikit-learn,jm-begon/scikit-learn,cainiaocome/scikit-learn,samzhang111/scikit-learn,Myasuka/scikit-learn,xavierwu/scikit-learn,adamgreenhall/scikit-learn,rsivapr/scikit-learn,kjung/scikit-learn,pythonvietnam/scikit-learn,hugobowne/scikit-learn,luo66/scikit-learn,Sentient07/scikit-learn,YinongLong/scikit-learn,florian-f/sklearn,AnasGhrab/scikit-learn,dsquareindia/scikit-learn,etkirsch/scikit-learn,kevin-intel/scikit-learn,nesterione/scikit-learn,clemkoa/scikit-learn,gotomypc/scikit-learn,cl4rke/scikit-learn,Jimmy-Morzaria/scikit-learn,Lawrence-Liu/scikit-learn,h2educ/scikit-learn,schets/scikit-learn,kagayakidan/scikit-learn,mlyundin/scikit-learn,dhruv13J/scikit-learn,nmayorov/scikit-learn,smartscheduling/scikit-learn-categorical-tree,jakobworldpeace/scikit-learn,schets/scikit-learn,giorgiop/scikit-learn,AlexandreAbraham/scikit-learn,JosmanPS/scikit-learn,equialgo/scikit-learn,trankmichael/scikit-learn,nhejazi/scikit-learn,anurag313/scikit-learn,jereze/scikit-learn,xuewei4d/scikit-learn,robin-lai/scikit-learn,macks22/scikit-learn,MatthieuBizien/scikit-learn,PrashntS/scikit-learn,maheshakya/scikit-learn,macks22/scikit-learn,chrsrds/scikit-learn,ashhher3/scikit-learn,arahuja/scikit-learn,ky822/scikit-learn,evgchz/scikit-learn,pompiduskus/scikit-learn,JeanKossaifi/scikit-learn,tdhopper/scikit-learn,nvoron23/scikit-learn,fbagirov/scikit-learn,alexsavio/scikit-learn,shenzebang/scikit-learn,mugizico/scikit-learn,lesteve/scikit-learn,3manuek/scikit-learn,0x0all/scikit-learn,cdegroc/scikit-learn,joernhees/scikit-learn,alexeyum/scikit-learn,vybstat/scikit-learn,ChanderG/scikit-learn,mblondel/scikit-learn,wazeerzulfikar/scikit-learn,shenzebang/scikit-learn,terkkila/scikit-learn,IshankGulati/scikit-learn,costypetrisor/scikit-learn,fzalkow/scikit-learn,siutanwong/scikit-learn,mhdella/scikit-learn,j
seabold/scikit-learn,ngoix/OCRF,ogrisel/scikit-learn,abhishekgahlot/scikit-learn,bigdataelephants/scikit-learn,devanshdalal/scikit-learn,shikhardb/scikit-learn,jorge2703/scikit-learn,nikitasingh981/scikit-learn,sgenoud/scikit-learn,vortex-ape/scikit-learn,ningchi/scikit-learn,meduz/scikit-learn,hugobowne/scikit-learn,manhhomienbienthuy/scikit-learn,victorbergelin/scikit-learn,idlead/scikit-learn,RomainBrault/scikit-learn,Lawrence-Liu/scikit-learn,Achuth17/scikit-learn,vibhorag/scikit-learn,nelson-liu/scikit-learn,liangz0707/scikit-learn,robbymeals/scikit-learn,appapantula/scikit-learn,anntzer/scikit-learn,mugizico/scikit-learn,JosmanPS/scikit-learn,altairpearl/scikit-learn,ephes/scikit-learn,fabioticconi/scikit-learn,ashhher3/scikit-learn,rajat1994/scikit-learn,AlexanderFabisch/scikit-learn,ZENGXH/scikit-learn,jmschrei/scikit-learn,NelisVerhoef/scikit-learn,kashif/scikit-learn,rahuldhote/scikit-learn,iismd17/scikit-learn,sgenoud/scikit-learn,lazywei/scikit-learn,xzh86/scikit-learn,xavierwu/scikit-learn,bnaul/scikit-learn,jorge2703/scikit-learn,samuel1208/scikit-learn,nrhine1/scikit-learn,rishikksh20/scikit-learn,mhue/scikit-learn,q1ang/scikit-learn,billy-inn/scikit-learn,AnasGhrab/scikit-learn,zorroblue/scikit-learn,JPFrancoia/scikit-learn,rahul-c1/scikit-learn,nvoron23/scikit-learn,mikebenfield/scikit-learn,procoder317/scikit-learn,aetilley/scikit-learn,loli/semisupervisedforests,toastedcornflakes/scikit-learn,OshynSong/scikit-learn,kagayakidan/scikit-learn,JeanKossaifi/scikit-learn,cwu2011/scikit-learn,poryfly/scikit-learn,iismd17/scikit-learn,ishanic/scikit-learn,luo66/scikit-learn,HolgerPeters/scikit-learn,kagayakidan/scikit-learn,dhruv13J/scikit-learn,ishanic/scikit-learn,lin-credible/scikit-learn,mfjb/scikit-learn,yyjiang/scikit-learn,JPFrancoia/scikit-learn,ilo10/scikit-learn,samuel1208/scikit-learn,hsuantien/scikit-learn,0asa/scikit-learn,aflaxman/scikit-learn,glennq/scikit-learn,btabibian/scikit-learn,sumspr/scikit-learn,ldirer/scikit-learn,lesteve/scikit-l
earn,trungnt13/scikit-learn,djgagne/scikit-learn,CforED/Machine-Learning,mhdella/scikit-learn,LiaoPan/scikit-learn,fabioticconi/scikit-learn,jakirkham/scikit-learn,michigraber/scikit-learn,scikit-learn/scikit-learn,kashif/scikit-learn,fredhusser/scikit-learn,carrillo/scikit-learn,yask123/scikit-learn,siutanwong/scikit-learn,eg-zhang/scikit-learn,wzbozon/scikit-learn,marcocaccin/scikit-learn,potash/scikit-learn,Obus/scikit-learn,jpautom/scikit-learn,huobaowangxi/scikit-learn,hdmetor/scikit-learn,zaxtax/scikit-learn,rsivapr/scikit-learn,anirudhjayaraman/scikit-learn,hitszxp/scikit-learn,glemaitre/scikit-learn,glemaitre/scikit-learn,RachitKansal/scikit-learn,sumspr/scikit-learn,ClimbsRocks/scikit-learn,mlyundin/scikit-learn,MechCoder/scikit-learn,dsquareindia/scikit-learn,voxlol/scikit-learn,3manuek/scikit-learn,liyu1990/sklearn,espg/scikit-learn,Titan-C/scikit-learn,vigilv/scikit-learn,sinhrks/scikit-learn,sergeyf/scikit-learn,jkarnows/scikit-learn,ngoix/OCRF,clemkoa/scikit-learn,ilo10/scikit-learn,huzq/scikit-learn,andrewnc/scikit-learn,liberatorqjw/scikit-learn,devanshdalal/scikit-learn,dsquareindia/scikit-learn,chrsrds/scikit-learn,mugizico/scikit-learn,CVML/scikit-learn,robin-lai/scikit-learn,PrashntS/scikit-learn,mojoboss/scikit-learn,ElDeveloper/scikit-learn,Nyker510/scikit-learn,thientu/scikit-learn,JsNoNo/scikit-learn,h2educ/scikit-learn,toastedcornflakes/scikit-learn,giorgiop/scikit-learn,Myasuka/scikit-learn,roxyboy/scikit-learn,raghavrv/scikit-learn,henrykironde/scikit-learn,samuel1208/scikit-learn,justincassidy/scikit-learn,joshloyal/scikit-learn,beepee14/scikit-learn,madjelan/scikit-learn,r-mart/scikit-learn,ZenDevelopmentSystems/scikit-learn,ankurankan/scikit-learn,Akshay0724/scikit-learn,rajat1994/scikit-learn,saiwing-yeung/scikit-learn,jjx02230808/project0223,fengzhyuan/scikit-learn,tosolveit/scikit-learn,mxjl620/scikit-learn,vinayak-mehta/scikit-learn,cl4rke/scikit-learn,plissonf/scikit-learn,ClimbsRocks/scikit-learn,IndraVikas/scikit-learn,ogrisel/sc
ikit-learn,moutai/scikit-learn,zaxtax/scikit-learn,wlamond/scikit-learn,wzbozon/scikit-learn,zorojean/scikit-learn,andrewnc/scikit-learn,lenovor/scikit-learn,ningchi/scikit-learn,loli/sklearn-ensembletrees,xubenben/scikit-learn,f3r/scikit-learn,petosegan/scikit-learn,treycausey/scikit-learn,mfjb/scikit-learn,xavierwu/scikit-learn,simon-pepin/scikit-learn,liyu1990/sklearn,btabibian/scikit-learn,YinongLong/scikit-learn,belltailjp/scikit-learn,dhruv13J/scikit-learn,Garrett-R/scikit-learn,jlegendary/scikit-learn,loli/semisupervisedforests,q1ang/scikit-learn,pianomania/scikit-learn,depet/scikit-learn,OshynSong/scikit-learn,nikitasingh981/scikit-learn,jseabold/scikit-learn,LohithBlaze/scikit-learn,Myasuka/scikit-learn,ElDeveloper/scikit-learn,fyffyt/scikit-learn,waterponey/scikit-learn,hrjn/scikit-learn,xiaoxiamii/scikit-learn,wanggang3333/scikit-learn,raghavrv/scikit-learn,MartinDelzant/scikit-learn,RPGOne/scikit-learn,cybernet14/scikit-learn,Jimmy-Morzaria/scikit-learn,ChanChiChoi/scikit-learn,MartinDelzant/scikit-learn,zaxtax/scikit-learn,Titan-C/scikit-learn,IshankGulati/scikit-learn,bnaul/scikit-learn,aabadie/scikit-learn,mrshu/scikit-learn,wlamond/scikit-learn,hrjn/scikit-learn,khkaminska/scikit-learn,herilalaina/scikit-learn,murali-munna/scikit-learn,NelisVerhoef/scikit-learn,saiwing-yeung/scikit-learn,cainiaocome/scikit-learn,glemaitre/scikit-learn,yask123/scikit-learn,abimannans/scikit-learn,beepee14/scikit-learn,yonglehou/scikit-learn,jzt5132/scikit-learn,vigilv/scikit-learn,madjelan/scikit-learn,justincassidy/scikit-learn,amueller/scikit-learn,lucidfrontier45/scikit-learn,Fireblend/scikit-learn,Aasmi/scikit-learn,larsmans/scikit-learn,loli/sklearn-ensembletrees,russel1237/scikit-learn,RomainBrault/scikit-learn,cauchycui/scikit-learn,cauchycui/scikit-learn,cauchycui/scikit-learn,icdishb/scikit-learn,zhenv5/scikit-learn,vibhorag/scikit-learn,Sentient07/scikit-learn,sinhrks/scikit-learn,bigdataelephants/scikit-learn,vshtanko/scikit-learn,AlexandreAbraham/scikit-le
arn,bigdataelephants/scikit-learn,untom/scikit-learn,liangz0707/scikit-learn,AIML/scikit-learn,tawsifkhan/scikit-learn,RomainBrault/scikit-learn,jakirkham/scikit-learn,walterreade/scikit-learn,davidgbe/scikit-learn,kevin-intel/scikit-learn,florian-f/sklearn,poryfly/scikit-learn,zihua/scikit-learn,jkarnows/scikit-learn,wlamond/scikit-learn,mhue/scikit-learn,jakirkham/scikit-learn,AlexanderFabisch/scikit-learn,MohammedWasim/scikit-learn,UNR-AERIAL/scikit-learn,heli522/scikit-learn,tdhopper/scikit-learn,Akshay0724/scikit-learn,DSLituiev/scikit-learn,jaidevd/scikit-learn,ningchi/scikit-learn,BiaDarkia/scikit-learn,cdegroc/scikit-learn,huzq/scikit-learn,lin-credible/scikit-learn,nrhine1/scikit-learn,joshloyal/scikit-learn,evgchz/scikit-learn,jmetzen/scikit-learn,macks22/scikit-learn,rahuldhote/scikit-learn,larsmans/scikit-learn,bthirion/scikit-learn,robbymeals/scikit-learn,cwu2011/scikit-learn,huobaowangxi/scikit-learn,nomadcube/scikit-learn,jayflo/scikit-learn,mhue/scikit-learn,rvraghav93/scikit-learn,kjung/scikit-learn,olologin/scikit-learn,petosegan/scikit-learn,wazeerzulfikar/scikit-learn,betatim/scikit-learn,BiaDarkia/scikit-learn,btabibian/scikit-learn,gclenaghan/scikit-learn,massmutual/scikit-learn,elkingtonmcb/scikit-learn,mojoboss/scikit-learn,PrashntS/scikit-learn,poryfly/scikit-learn,etkirsch/scikit-learn,larsmans/scikit-learn,rishikksh20/scikit-learn,glouppe/scikit-learn,sarahgrogan/scikit-learn,vermouthmjl/scikit-learn,Garrett-R/scikit-learn,nvoron23/scikit-learn,cauchycui/scikit-learn,BiaDarkia/scikit-learn,sergeyf/scikit-learn,qifeigit/scikit-learn,nesterione/scikit-learn,aminert/scikit-learn,eickenberg/scikit-learn,lazywei/scikit-learn,massmutual/scikit-learn,devanshdalal/scikit-learn,Myasuka/scikit-learn,Akshay0724/scikit-learn,herilalaina/scikit-learn,fredhusser/scikit-learn,mblondel/scikit-learn,jayflo/scikit-learn,0asa/scikit-learn,billy-inn/scikit-learn,qifeigit/scikit-learn,lucidfrontier45/scikit-learn,manhhomienbienthuy/scikit-learn,mehdidc/scikit-
learn,huobaowangxi/scikit-learn,thilbern/scikit-learn,hdmetor/scikit-learn,0x0all/scikit-learn,kmike/scikit-learn,raghavrv/scikit-learn,espg/scikit-learn,jblackburne/scikit-learn,B3AU/waveTree,B3AU/waveTree,ltiao/scikit-learn,quheng/scikit-learn,andrewnc/scikit-learn,joernhees/scikit-learn,carrillo/scikit-learn,trankmichael/scikit-learn,fyffyt/scikit-learn,mrshu/scikit-learn,aabadie/scikit-learn,CVML/scikit-learn,jm-begon/scikit-learn,IssamLaradji/scikit-learn,idlead/scikit-learn,DonBeo/scikit-learn,abimannans/scikit-learn,Nyker510/scikit-learn,RachitKansal/scikit-learn,shahankhatch/scikit-learn,hugobowne/scikit-learn,massmutual/scikit-learn,q1ang/scikit-learn,jayflo/scikit-learn,abhishekkrthakur/scikit-learn,fbagirov/scikit-learn,bhargav/scikit-learn,manashmndl/scikit-learn,amueller/scikit-learn,ahoyosid/scikit-learn,aewhatley/scikit-learn,vinayak-mehta/scikit-learn,PatrickOReilly/scikit-learn,shenzebang/scikit-learn,olologin/scikit-learn,Djabbz/scikit-learn,bnaul/scikit-learn,kevin-intel/scikit-learn,AlexanderFabisch/scikit-learn,theoryno3/scikit-learn,mehdidc/scikit-learn,kaichogami/scikit-learn,siutanwong/scikit-learn,shangwuhencc/scikit-learn,herilalaina/scikit-learn,untom/scikit-learn,phdowling/scikit-learn,arabenjamin/scikit-learn,bhargav/scikit-learn,PatrickChrist/scikit-learn,TomDLT/scikit-learn,jjx02230808/project0223,NunoEdgarGub1/scikit-learn,jpautom/scikit-learn,smartscheduling/scikit-learn-categorical-tree,joernhees/scikit-learn,LohithBlaze/scikit-learn,saiwing-yeung/scikit-learn,thilbern/scikit-learn,rrohan/scikit-learn,rajat1994/scikit-learn,cwu2011/scikit-learn,plissonf/scikit-learn,ivannz/scikit-learn,arahuja/scikit-learn,akionakamura/scikit-learn,mehdidc/scikit-learn,RayMick/scikit-learn,theoryno3/scikit-learn,loli/semisupervisedforests,heli522/scikit-learn,ephes/scikit-learn,vermouthmjl/scikit-learn,pianomania/scikit-learn,alvarofierroclavero/scikit-learn,CVML/scikit-learn,f3r/scikit-learn,NunoEdgarGub1/scikit-learn,cdegroc/scikit-learn,roxyboy/s
cikit-learn,Lawrence-Liu/scikit-learn,lbishal/scikit-learn,jblackburne/scikit-learn,jakobworldpeace/scikit-learn,nikitasingh981/scikit-learn,glouppe/scikit-learn,RayMick/scikit-learn,sonnyhu/scikit-learn,rrohan/scikit-learn,jkarnows/scikit-learn,lenovor/scikit-learn,potash/scikit-learn,sinhrks/scikit-learn,Djabbz/scikit-learn,lbishal/scikit-learn,shangwuhencc/scikit-learn,ivannz/scikit-learn,anurag313/scikit-learn,themrmax/scikit-learn,shusenl/scikit-learn,CforED/Machine-Learning,yunfeilu/scikit-learn,mjudsp/Tsallis,B3AU/waveTree,ycaihua/scikit-learn,Barmaley-exe/scikit-learn,zorojean/scikit-learn,xzh86/scikit-learn,Achuth17/scikit-learn,dingocuster/scikit-learn,mxjl620/scikit-learn,xiaoxiamii/scikit-learn,alvarofierroclavero/scikit-learn,zhenv5/scikit-learn,DSLituiev/scikit-learn,BiaDarkia/scikit-learn,thilbern/scikit-learn,djgagne/scikit-learn,B3AU/waveTree,IshankGulati/scikit-learn,Aasmi/scikit-learn,hsuantien/scikit-learn,ilyes14/scikit-learn,waterponey/scikit-learn,plissonf/scikit-learn,mwv/scikit-learn,florian-f/sklearn,kaichogami/scikit-learn,vibhorag/scikit-learn,khkaminska/scikit-learn,ZENGXH/scikit-learn,gotomypc/scikit-learn,lin-credible/scikit-learn,pypot/scikit-learn,smartscheduling/scikit-learn-categorical-tree,HolgerPeters/scikit-learn,zhenv5/scikit-learn,MartinSavc/scikit-learn,walterreade/scikit-learn,tdhopper/scikit-learn,AIML/scikit-learn,Sentient07/scikit-learn,466152112/scikit-learn,fyffyt/scikit-learn,DonBeo/scikit-learn,treycausey/scikit-learn,ChanderG/scikit-learn,abhishekgahlot/scikit-learn,dsullivan7/scikit-learn,phdowling/scikit-learn,abimannans/scikit-learn,mattilyra/scikit-learn,cl4rke/scikit-learn,yonglehou/scikit-learn,hrjn/scikit-learn,dsquareindia/scikit-learn,AIML/scikit-learn,vshtanko/scikit-learn,wazeerzulfikar/scikit-learn,jakirkham/scikit-learn,jmetzen/scikit-learn,0asa/scikit-learn,justincassidy/scikit-learn,liyu1990/sklearn,idlead/scikit-learn,shyamalschandra/scikit-learn,PatrickChrist/scikit-learn,ltiao/scikit-learn,ndingwall
/scikit-learn,ngoix/OCRF,thientu/scikit-learn,yonglehou/scikit-learn,Achuth17/scikit-learn,JsNoNo/scikit-learn,spallavolu/scikit-learn,IndraVikas/scikit-learn,kevin-intel/scikit-learn,IndraVikas/scikit-learn,pkruskal/scikit-learn,deepesch/scikit-learn,Barmaley-exe/scikit-learn,davidgbe/scikit-learn,tosolveit/scikit-learn,vivekmishra1991/scikit-learn,simon-pepin/scikit-learn,Windy-Ground/scikit-learn,pratapvardhan/scikit-learn,stylianos-kampakis/scikit-learn,mrshu/scikit-learn,ChanderG/scikit-learn,ahoyosid/scikit-learn,mikebenfield/scikit-learn,NelisVerhoef/scikit-learn,ndingwall/scikit-learn,ky822/scikit-learn,terkkila/scikit-learn,MechCoder/scikit-learn,Srisai85/scikit-learn,mxjl620/scikit-learn,zorojean/scikit-learn,lin-credible/scikit-learn,vortex-ape/scikit-learn,JeanKossaifi/scikit-learn,fengzhyuan/scikit-learn,nomadcube/scikit-learn,vigilv/scikit-learn,bhargav/scikit-learn,rsivapr/scikit-learn,dingocuster/scikit-learn,meduz/scikit-learn,themrmax/scikit-learn,tmhm/scikit-learn,kjung/scikit-learn,andaag/scikit-learn,UNR-AERIAL/scikit-learn,DSLituiev/scikit-learn,zorojean/scikit-learn,khkaminska/scikit-learn,moutai/scikit-learn,shikhardb/scikit-learn,alexsavio/scikit-learn,abhishekkrthakur/scikit-learn,mjudsp/Tsallis,shangwuhencc/scikit-learn,liberatorqjw/scikit-learn,rohanp/scikit-learn,arjoly/scikit-learn,hsuantien/scikit-learn,shusenl/scikit-learn,JeanKossaifi/scikit-learn,TomDLT/scikit-learn,rahuldhote/scikit-learn,ycaihua/scikit-learn,tomlof/scikit-learn,eg-zhang/scikit-learn,pratapvardhan/scikit-learn,vshtanko/scikit-learn,hsiaoyi0504/scikit-learn,aminert/scikit-learn,Barmaley-exe/scikit-learn,anntzer/scikit-learn,nhejazi/scikit-learn,YinongLong/scikit-learn,mblondel/scikit-learn,rrohan/scikit-learn,Windy-Ground/scikit-learn,belltailjp/scikit-learn,anntzer/scikit-learn,krez13/scikit-learn | """
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0)
forest.fit(X, y)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
Update random forest face example to use several cores | """
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
from time import time
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Number of cores to use to perform parallel fitting of the forest model
n_jobs=2
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
print "Fitting ExtraTreesClassifier on faces data with %d cores..." % n_jobs
t0 = time()
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0, n_jobs=n_jobs)
forest.fit(X, y)
print "done in %0.3fs" % (time() - t0)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
| <commit_before>"""
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0)
forest.fit(X, y)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
<commit_msg>Update random forest face example to use several cores<commit_after> | """
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
from time import time
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Number of cores to use to perform parallel fitting of the forest model
n_jobs=2
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
print "Fitting ExtraTreesClassifier on faces data with %d cores..." % n_jobs
t0 = time()
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0, n_jobs=n_jobs)
forest.fit(X, y)
print "done in %0.3fs" % (time() - t0)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
| """
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0)
forest.fit(X, y)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
Update random forest face example to use several cores"""
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
from time import time
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Number of cores to use to perform parallel fitting of the forest model
n_jobs=2
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
print "Fitting ExtraTreesClassifier on faces data with %d cores..." % n_jobs
t0 = time()
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0, n_jobs=n_jobs)
forest.fit(X, y)
print "done in %0.3fs" % (time() - t0)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
| <commit_before>"""
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0)
forest.fit(X, y)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
<commit_msg>Update random forest face example to use several cores<commit_after>"""
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
from time import time
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Number of cores to use to perform parallel fitting of the forest model
n_jobs=2
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
print "Fitting ExtraTreesClassifier on faces data with %d cores..." % n_jobs
t0 = time()
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
random_state=0, n_jobs=n_jobs)
forest.fit(X, y)
print "done in %0.3fs" % (time() - t0)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
|
27be11775d23e873f9e568b86b8a0b2409878d30 | job_runner/settings/env/development.py | job_runner/settings/env/development.py | import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
| import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.db.backends': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
| Hide the django backend db log messages. | Hide the django backend db log messages.
| Python | bsd-3-clause | spilgames/job-runner,spilgames/job-runner | import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
Hide the django backend db log messages. | import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.db.backends': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
| <commit_before>import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
<commit_msg>Hide the django backend db log messages.<commit_after> | import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.db.backends': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
| import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
Hide the django backend db log messages.import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.db.backends': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
| <commit_before>import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
<commit_msg>Hide the django backend db log messages.<commit_after>import os
from job_runner.settings.base import *
DEBUG = True
TEMPLATE_DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(PROJECT_ROOT, '..', 'database.sqlite'),
}
}
JOB_RUNNER_WS_SERVER = 'ws://localhost:5000/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s - %(asctime)s - %(name)s: %(message)s',
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'formatter': 'simple',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.db.backends': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
'': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
HOSTNAME = 'localhost:8000'
|
9de6036f722fb63378a74f08d7f05d184e1b175f | src/buildercore/concurrency.py | src/buildercore/concurrency.py | from . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % concurrency_name, concurrency_names)
| from . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % (concurrency_name, concurrency_names))
| Correct tuple for error message arguments | Correct tuple for error message arguments
| Python | mit | elifesciences/builder,elifesciences/builder | from . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % concurrency_name, concurrency_names)
Correct tuple for error message arguments | from . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % (concurrency_name, concurrency_names))
| <commit_before>from . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % concurrency_name, concurrency_names)
<commit_msg>Correct tuple for error message arguments<commit_after> | from . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % (concurrency_name, concurrency_names))
| from . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % concurrency_name, concurrency_names)
Correct tuple for error message argumentsfrom . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % (concurrency_name, concurrency_names))
| <commit_before>from . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % concurrency_name, concurrency_names)
<commit_msg>Correct tuple for error message arguments<commit_after>from . import bluegreen, context_handler
# TODO: move as buildercore.concurrency.concurrency_for
def concurrency_for(stackname, concurrency_name):
"""concurrency default is to perform updates one machine at a time.
Concurrency can be:
- serial: one at a time
- parallel: all together
- blue-green: 50% at a time"""
concurrency_names = ['serial', 'parallel', 'blue-green']
if concurrency_name == 'blue-green':
context = context_handler.load_context(stackname)
return bluegreen.BlueGreenConcurrency(context['aws']['region'])
if concurrency_name == 'serial' or concurrency_name == 'parallel':
# maybe return a fabric object in the future
return concurrency_name
if concurrency_name is None:
return 'parallel'
raise ValueError("Concurrency %s is not supported. Supported models: %s" % (concurrency_name, concurrency_names))
|
29d6010c18605179860afe08f5cac218e1a65716 | dbaas/workflow/steps/util/resize/check_database_status.py | dbaas/workflow/steps/util/resize/check_database_status.py | # -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
| # -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
| Change check_db connection resize step to wait database start process | Change check_db connection resize step to wait database start process
| Python | bsd-3-clause | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | # -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
Change check_db connection resize step to wait database start process | # -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
| <commit_before># -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
<commit_msg>Change check_db connection resize step to wait database start process<commit_after> | # -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
| # -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
Change check_db connection resize step to wait database start process# -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
| <commit_before># -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
<commit_msg>Change check_db connection resize step to wait database start process<commit_after># -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict['database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status=1
workflow_dict['database'].save()
return True
return False
except Exception,e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
|
08167d79efb97a0728564caf96a96b08763ddf60 | bibliopixel/util/importer.py | bibliopixel/util/importer.py | import importlib
def import_symbol(typename):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts))
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename)(*args, **kwds)
| import importlib
def import_symbol(typename, package=None):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename, package=package)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts), package=package)
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, package=None, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename, package)(*args, **kwds)
| Add a package argument to import_symbol. | Add a package argument to import_symbol.
| Python | mit | ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel | import importlib
def import_symbol(typename):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts))
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename)(*args, **kwds)
Add a package argument to import_symbol. | import importlib
def import_symbol(typename, package=None):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename, package=package)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts), package=package)
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, package=None, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename, package)(*args, **kwds)
| <commit_before>import importlib
def import_symbol(typename):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts))
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename)(*args, **kwds)
<commit_msg>Add a package argument to import_symbol.<commit_after> | import importlib
def import_symbol(typename, package=None):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename, package=package)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts), package=package)
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, package=None, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename, package)(*args, **kwds)
| import importlib
def import_symbol(typename):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts))
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename)(*args, **kwds)
Add a package argument to import_symbol.import importlib
def import_symbol(typename, package=None):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename, package=package)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts), package=package)
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, package=None, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename, package)(*args, **kwds)
| <commit_before>import importlib
def import_symbol(typename):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts))
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename)(*args, **kwds)
<commit_msg>Add a package argument to import_symbol.<commit_after>import importlib
def import_symbol(typename, package=None):
"""Import a module or typename within a module from its name."""
try:
return importlib.import_module(typename, package=package)
except ImportError as e:
parts = typename.split('.')
if len(parts) > 1:
typename = parts.pop()
# Call import_module recursively.
namespace = import_symbol('.'.join(parts), package=package)
try:
return getattr(namespace, typename)
except AttributeError:
pass
raise e
except:
raise
def make_object(*args, typename, package=None, **kwds):
"""Make an object from a symbol."""
return import_symbol(typename, package)(*args, **kwds)
|
5c7f6f90f4ae88e70c738ab13170ad1c64447db3 | xfdf2ascii.py | xfdf2ascii.py | #!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = h.get("page")
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
| #!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = int(h.get("page"))+1
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
| Correct the off-by one error from logical to physical pages | Correct the off-by one error from logical to physical pages
| Python | mit | juandesant/xfdf2ascii | #!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = h.get("page")
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
Correct the off-by one error from logical to physical pages | #!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = int(h.get("page"))+1
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
| <commit_before>#!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = h.get("page")
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
<commit_msg>Correct the off-by one error from logical to physical pages<commit_after> | #!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = int(h.get("page"))+1
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
| #!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = h.get("page")
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
Correct the off-by one error from logical to physical pages#!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = int(h.get("page"))+1
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
| <commit_before>#!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = h.get("page")
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
<commit_msg>Correct the off-by one error from logical to physical pages<commit_after>#!/usr/bin/env python
from lxml import etree
import sys
comments_file = open("examples/Technical_Document_Comments.xfdf", "r")
comments_xml = etree.parse(comments_file)
root = comments_xml.getroot()
prefix = None
try:
prefix = root.tag.partition("}")[0].partition("{")[-1]
except:
pass
if prefix:
highlights = root[0].findall("{%s}highlight" % prefix)
else:
highlights = root[0].findall("highlight")
with sys.stdout as out:
line = u"\tIssue\tSection\tPage\tBy\tObservation Description\n".encode("utf-8")
out.write(line)
issue = 1
for h in highlights:
try:
page = int(h.get("page"))+1
except:
continue
try:
author = h.get("title")
except:
continue
try:
content = h.find("{%s}contents" % prefix).text
except:
continue
content = content.replace("\n","-").replace("\r","")
line = u"NA\t{2}\tSECTION\t{0}\t{3}\t{1}\n".format(page,content,issue,author).encode("utf-8")
out.write(line)
issue += 1
|
a5a92b81244076e8cf04c06398ce63a87d1357dd | adhocracy/tests/test_doctest_files.py | adhocracy/tests/test_doctest_files.py | from glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url
}
use_cases = find_use_cases()
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
| from glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy import model
from adhocracy.tests import testtools
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
use_cases = find_use_cases()
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url,
'testtools': testtools,
'model': model
}
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
| Add the modules models and testtools to the doctest globals | Add the modules models and testtools to the doctest globals
| Python | agpl-3.0 | SysTheron/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,alkadis/vcv,alkadis/vcv,SysTheron/adhocracy,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,phihag/adhocracy,liqd/adhocracy,phihag/adhocracy,SysTheron/adhocracy,liqd/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy,alkadis/vcv,phihag/adhocracy,alkadis/vcv,liqd/adhocracy,DanielNeugebauer/adhocracy | from glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url
}
use_cases = find_use_cases()
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
Add the modules models and testtools to the doctest globals | from glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy import model
from adhocracy.tests import testtools
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
use_cases = find_use_cases()
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url,
'testtools': testtools,
'model': model
}
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
| <commit_before>from glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url
}
use_cases = find_use_cases()
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
<commit_msg>Add the modules models and testtools to the doctest globals<commit_after> | from glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy import model
from adhocracy.tests import testtools
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
use_cases = find_use_cases()
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url,
'testtools': testtools,
'model': model
}
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
| from glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url
}
use_cases = find_use_cases()
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
Add the modules models and testtools to the doctest globalsfrom glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy import model
from adhocracy.tests import testtools
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
use_cases = find_use_cases()
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url,
'testtools': testtools,
'model': model
}
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
| <commit_before>from glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url
}
use_cases = find_use_cases()
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
<commit_msg>Add the modules models and testtools to the doctest globals<commit_after>from glob import glob
import doctest
from doctest import DocFileSuite
from os.path import dirname
import unittest
from adhocracy import model
from adhocracy.tests import testtools
from adhocracy.tests.testbrowser import ADHOCRACY_LAYER, ADHOCRACY_LAYER_APP
from adhocracy.tests.testbrowser import app_url, instance_url
from adhocracy.tests.testbrowser import Browser
def find_use_cases():
here = dirname(__file__)
paths = glob('{here}/use_cases/*.rst'.format(here=here))
# we need relative paths for DocFileSuite
pathes = [path.replace(here, '.') for path in paths]
return pathes
def make_browser():
return Browser(wsgi_app=ADHOCRACY_LAYER_APP)
use_cases = find_use_cases()
flags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
globs = {"browser": make_browser(),
'make_browser': make_browser,
"app": ADHOCRACY_LAYER_APP,
"app_url": app_url,
"instance_url": instance_url,
'testtools': testtools,
'model': model
}
class DoctestTestCase(unittest.TestCase):
def __new__(self, test):
return getattr(self, test)()
@classmethod
def test_suite(self):
return DocFileSuite(
*use_cases,
#add here aditional testfiles
setUp=ADHOCRACY_LAYER.setUp,
tearDown=ADHOCRACY_LAYER.tearDown,
globs=globs,
optionflags=flags
)
|
1ed6c3f6d79aca5d647e8ff8332096c4fc111548 | neuroimaging/utils/tests/test_utils.py | neuroimaging/utils/tests/test_utils.py | from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self): pass
#print "TestCase initialization..."
def test_foo(self): pass
#print "testing foo"
def test_bar(self): pass
#print "testing bar"
if __name__ == '__main__':
NumpyTest.main()
| from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self):
pass
#print "TestCase initialization..."
def test_foo(self):
self.fail('neuroimaging.utils, odict, path, etc... have _NO_ tests!')
if __name__ == '__main__':
NumpyTest().run()
| Fix test example so it runs. | BUG: Fix test example so it runs. | Python | bsd-3-clause | yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD | from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self): pass
#print "TestCase initialization..."
def test_foo(self): pass
#print "testing foo"
def test_bar(self): pass
#print "testing bar"
if __name__ == '__main__':
NumpyTest.main()
BUG: Fix test example so it runs. | from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self):
pass
#print "TestCase initialization..."
def test_foo(self):
self.fail('neuroimaging.utils, odict, path, etc... have _NO_ tests!')
if __name__ == '__main__':
NumpyTest().run()
| <commit_before>from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self): pass
#print "TestCase initialization..."
def test_foo(self): pass
#print "testing foo"
def test_bar(self): pass
#print "testing bar"
if __name__ == '__main__':
NumpyTest.main()
<commit_msg>BUG: Fix test example so it runs.<commit_after> | from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self):
pass
#print "TestCase initialization..."
def test_foo(self):
self.fail('neuroimaging.utils, odict, path, etc... have _NO_ tests!')
if __name__ == '__main__':
NumpyTest().run()
| from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self): pass
#print "TestCase initialization..."
def test_foo(self): pass
#print "testing foo"
def test_bar(self): pass
#print "testing bar"
if __name__ == '__main__':
NumpyTest.main()
BUG: Fix test example so it runs.from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self):
pass
#print "TestCase initialization..."
def test_foo(self):
self.fail('neuroimaging.utils, odict, path, etc... have _NO_ tests!')
if __name__ == '__main__':
NumpyTest().run()
| <commit_before>from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self): pass
#print "TestCase initialization..."
def test_foo(self): pass
#print "testing foo"
def test_bar(self): pass
#print "testing bar"
if __name__ == '__main__':
NumpyTest.main()
<commit_msg>BUG: Fix test example so it runs.<commit_after>from numpy.testing import NumpyTest, NumpyTestCase
class test_Template(NumpyTestCase):
def setUp(self):
pass
#print "TestCase initialization..."
def test_foo(self):
self.fail('neuroimaging.utils, odict, path, etc... have _NO_ tests!')
if __name__ == '__main__':
NumpyTest().run()
|
3a2b536f24eee711a1329daf7403bd92840a87e3 | gpxpandas/gpxreader.py | gpxpandas/gpxreader.py | __author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
return tracks_frame.unstack() | __author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
assert gpx.name
return pd.DataFrame({gpx.name:tracks_frame.unstack()}) | Use gpx.name as index to gpx data_frame | Use gpx.name as index to gpx data_frame
| Python | mit | komax/gpx-pandas | __author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
return tracks_frame.unstack()Use gpx.name as index to gpx data_frame | __author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
assert gpx.name
return pd.DataFrame({gpx.name:tracks_frame.unstack()}) | <commit_before>__author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
return tracks_frame.unstack()<commit_msg>Use gpx.name as index to gpx data_frame<commit_after> | __author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
assert gpx.name
return pd.DataFrame({gpx.name:tracks_frame.unstack()}) | __author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
return tracks_frame.unstack()Use gpx.name as index to gpx data_frame__author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
assert gpx.name
return pd.DataFrame({gpx.name:tracks_frame.unstack()}) | <commit_before>__author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
return tracks_frame.unstack()<commit_msg>Use gpx.name as index to gpx data_frame<commit_after>__author__ = 'max'
import gpxpy
import pandas as pd
def parse_gpx(gpx_file_name):
return gpxpy.parse(gpx_file_name)
def data_frame_for_track_segment(segment):
seg_dict = {}
for point in segment.points:
seg_dict[point.time] = [point.latitude, point.longitude,
point.elevation, point.speed]
seg_frame = pd.DataFrame(data=seg_dict)
# Switch columns and rows s.t. timestamps are rows and gps data columns.
seg_frame = seg_frame.T
seg_frame.columns = ['latitude', 'longitude', 'altitude', 'speed']
return seg_frame
def track_segment_mapping(track):
segments = (data_frame_for_track_segment(segment)
for segment in track.segments)
return segments
def pandas_data_frame_for_gpx(gpx):
tracks_frames = (track_segment_mapping(track) for track in gpx.tracks)
# Create a hierarchical DataFrame by unstacking.
tracks_frame = pd.DataFrame(tracks_frames)
assert gpx.name
return pd.DataFrame({gpx.name:tracks_frame.unstack()}) |
f2f422702985c3e890fa19a7f841baba837c5bba | main.py | main.py | from listing import Listing, session
from scraper import Scraper
from slack import Slack
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(3600)
| from listing import Listing, session
from scraper import Scraper
from slack import Slack
from random import randint
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(14400 + randint(-600, 600))
| Increase and fuzz sleep time | Increase and fuzz sleep time
| Python | mit | vboginskey/cribfinder | from listing import Listing, session
from scraper import Scraper
from slack import Slack
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(3600)
Increase and fuzz sleep time | from listing import Listing, session
from scraper import Scraper
from slack import Slack
from random import randint
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(14400 + randint(-600, 600))
| <commit_before>from listing import Listing, session
from scraper import Scraper
from slack import Slack
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(3600)
<commit_msg>Increase and fuzz sleep time<commit_after> | from listing import Listing, session
from scraper import Scraper
from slack import Slack
from random import randint
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(14400 + randint(-600, 600))
| from listing import Listing, session
from scraper import Scraper
from slack import Slack
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(3600)
Increase and fuzz sleep timefrom listing import Listing, session
from scraper import Scraper
from slack import Slack
from random import randint
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(14400 + randint(-600, 600))
| <commit_before>from listing import Listing, session
from scraper import Scraper
from slack import Slack
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(3600)
<commit_msg>Increase and fuzz sleep time<commit_after>from listing import Listing, session
from scraper import Scraper
from slack import Slack
from random import randint
import sys
import traceback
import time
slack = Slack()
def scrape():
results = 0
duplicates = 0
for result in Scraper().results():
results += 1
listing = Listing(result).process()
if listing is None:
duplicates += 1
continue
session.add(listing)
session.commit()
if listing.transit_stop is None:
continue
post = (
':house: {0} :moneybag: ${1} :round_pushpin: {2} :station: {3} '
':link: <{4}>'
.format(listing.name, listing.price, listing.area,
listing.transit_stop, listing.link)
)
slack.post(post)
print("%s: processed %s listings, %s were duplicates." %
(time.ctime(), results, duplicates)
)
if __name__ == '__main__':
while True:
try:
scrape()
except KeyboardInterrupt:
sys.exit(0)
except Exception as e:
print("Error:", sys.exc_info()[0])
traceback.print_exc()
time.sleep(14400 + randint(-600, 600))
|
56b4532bd330ad4075f882511c87cb97eaeff10e | jujupy/__init__.py | jujupy/__init__.py | from jujupy.client import *
from jujupy.client import _temp_env
__all__ = ['_temp_env']
| from jujupy.client import (
AgentsNotStarted,
AuthNotAccepted,
AGENTS_READY,
client_from_config,
ConditionList,
coalesce_agent_status,
describe_substrate,
EnvJujuClient,
EnvJujuClient1X,
EnvJujuClient25,
ensure_dir,
get_cache_path,
get_client_class,
get_local_root,
get_machine_dns_name,
get_timeout_path,
get_timeout_prefix,
GroupReporter,
IncompatibleConfigClass,
InvalidEndpoint,
jes_home_path,
JESNotSupported,
JujuData,
JUJU_DEV_FEATURE_FLAGS,
Juju2Backend,
KILL_CONTROLLER,
KVM_MACHINE,
LXC_MACHINE,
LXD_MACHINE,
Machine,
NameNotAccepted,
NoProvider,
parse_new_state_server_from_error,
SimpleEnvironment,
SoftDeadlineExceeded,
Status,
temp_bootstrap_env,
_temp_env,
temp_yaml_file,
TypeNotAccepted,
uniquify_local,
until_timeout,
)
__all__ = [
'AgentsNotStarted',
'AuthNotAccepted',
'AGENTS_READY',
'client_from_config',
'ConditionList',
'coalesce_agent_status',
'describe_substrate',
'EnvJujuClient',
'EnvJujuClient1X',
'EnvJujuClient25',
'ensure_dir',
'get_cache_path',
'get_client_class',
'get_local_root',
'get_machine_dns_name',
'get_timeout_path',
'get_timeout_prefix',
'GroupReporter',
'IncompatibleConfigClass',
'InvalidEndpoint',
'jes_home_path',
'JESNotSupported',
'JujuData',
'JUJU_DEV_FEATURE_FLAGS',
'Juju2Backend',
'KILL_CONTROLLER',
'KVM_MACHINE',
'LXC_MACHINE',
'LXD_MACHINE',
'Machine',
'NameNotAccepted',
'NoProvider',
'parse_new_state_server_from_error',
'SimpleEnvironment',
'SoftDeadlineExceeded',
'Status',
'temp_bootstrap_env',
'_temp_env',
'temp_yaml_file',
'TypeNotAccepted',
'uniquify_local',
'until_timeout',
]
| Switch to explicit imports for jujupy. | Switch to explicit imports for jujupy. | Python | agpl-3.0 | mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju | from jujupy.client import *
from jujupy.client import _temp_env
__all__ = ['_temp_env']
Switch to explicit imports for jujupy. | from jujupy.client import (
AgentsNotStarted,
AuthNotAccepted,
AGENTS_READY,
client_from_config,
ConditionList,
coalesce_agent_status,
describe_substrate,
EnvJujuClient,
EnvJujuClient1X,
EnvJujuClient25,
ensure_dir,
get_cache_path,
get_client_class,
get_local_root,
get_machine_dns_name,
get_timeout_path,
get_timeout_prefix,
GroupReporter,
IncompatibleConfigClass,
InvalidEndpoint,
jes_home_path,
JESNotSupported,
JujuData,
JUJU_DEV_FEATURE_FLAGS,
Juju2Backend,
KILL_CONTROLLER,
KVM_MACHINE,
LXC_MACHINE,
LXD_MACHINE,
Machine,
NameNotAccepted,
NoProvider,
parse_new_state_server_from_error,
SimpleEnvironment,
SoftDeadlineExceeded,
Status,
temp_bootstrap_env,
_temp_env,
temp_yaml_file,
TypeNotAccepted,
uniquify_local,
until_timeout,
)
__all__ = [
'AgentsNotStarted',
'AuthNotAccepted',
'AGENTS_READY',
'client_from_config',
'ConditionList',
'coalesce_agent_status',
'describe_substrate',
'EnvJujuClient',
'EnvJujuClient1X',
'EnvJujuClient25',
'ensure_dir',
'get_cache_path',
'get_client_class',
'get_local_root',
'get_machine_dns_name',
'get_timeout_path',
'get_timeout_prefix',
'GroupReporter',
'IncompatibleConfigClass',
'InvalidEndpoint',
'jes_home_path',
'JESNotSupported',
'JujuData',
'JUJU_DEV_FEATURE_FLAGS',
'Juju2Backend',
'KILL_CONTROLLER',
'KVM_MACHINE',
'LXC_MACHINE',
'LXD_MACHINE',
'Machine',
'NameNotAccepted',
'NoProvider',
'parse_new_state_server_from_error',
'SimpleEnvironment',
'SoftDeadlineExceeded',
'Status',
'temp_bootstrap_env',
'_temp_env',
'temp_yaml_file',
'TypeNotAccepted',
'uniquify_local',
'until_timeout',
]
| <commit_before>from jujupy.client import *
from jujupy.client import _temp_env
__all__ = ['_temp_env']
<commit_msg>Switch to explicit imports for jujupy.<commit_after> | from jujupy.client import (
AgentsNotStarted,
AuthNotAccepted,
AGENTS_READY,
client_from_config,
ConditionList,
coalesce_agent_status,
describe_substrate,
EnvJujuClient,
EnvJujuClient1X,
EnvJujuClient25,
ensure_dir,
get_cache_path,
get_client_class,
get_local_root,
get_machine_dns_name,
get_timeout_path,
get_timeout_prefix,
GroupReporter,
IncompatibleConfigClass,
InvalidEndpoint,
jes_home_path,
JESNotSupported,
JujuData,
JUJU_DEV_FEATURE_FLAGS,
Juju2Backend,
KILL_CONTROLLER,
KVM_MACHINE,
LXC_MACHINE,
LXD_MACHINE,
Machine,
NameNotAccepted,
NoProvider,
parse_new_state_server_from_error,
SimpleEnvironment,
SoftDeadlineExceeded,
Status,
temp_bootstrap_env,
_temp_env,
temp_yaml_file,
TypeNotAccepted,
uniquify_local,
until_timeout,
)
__all__ = [
'AgentsNotStarted',
'AuthNotAccepted',
'AGENTS_READY',
'client_from_config',
'ConditionList',
'coalesce_agent_status',
'describe_substrate',
'EnvJujuClient',
'EnvJujuClient1X',
'EnvJujuClient25',
'ensure_dir',
'get_cache_path',
'get_client_class',
'get_local_root',
'get_machine_dns_name',
'get_timeout_path',
'get_timeout_prefix',
'GroupReporter',
'IncompatibleConfigClass',
'InvalidEndpoint',
'jes_home_path',
'JESNotSupported',
'JujuData',
'JUJU_DEV_FEATURE_FLAGS',
'Juju2Backend',
'KILL_CONTROLLER',
'KVM_MACHINE',
'LXC_MACHINE',
'LXD_MACHINE',
'Machine',
'NameNotAccepted',
'NoProvider',
'parse_new_state_server_from_error',
'SimpleEnvironment',
'SoftDeadlineExceeded',
'Status',
'temp_bootstrap_env',
'_temp_env',
'temp_yaml_file',
'TypeNotAccepted',
'uniquify_local',
'until_timeout',
]
| from jujupy.client import *
from jujupy.client import _temp_env
__all__ = ['_temp_env']
Switch to explicit imports for jujupy.from jujupy.client import (
AgentsNotStarted,
AuthNotAccepted,
AGENTS_READY,
client_from_config,
ConditionList,
coalesce_agent_status,
describe_substrate,
EnvJujuClient,
EnvJujuClient1X,
EnvJujuClient25,
ensure_dir,
get_cache_path,
get_client_class,
get_local_root,
get_machine_dns_name,
get_timeout_path,
get_timeout_prefix,
GroupReporter,
IncompatibleConfigClass,
InvalidEndpoint,
jes_home_path,
JESNotSupported,
JujuData,
JUJU_DEV_FEATURE_FLAGS,
Juju2Backend,
KILL_CONTROLLER,
KVM_MACHINE,
LXC_MACHINE,
LXD_MACHINE,
Machine,
NameNotAccepted,
NoProvider,
parse_new_state_server_from_error,
SimpleEnvironment,
SoftDeadlineExceeded,
Status,
temp_bootstrap_env,
_temp_env,
temp_yaml_file,
TypeNotAccepted,
uniquify_local,
until_timeout,
)
__all__ = [
'AgentsNotStarted',
'AuthNotAccepted',
'AGENTS_READY',
'client_from_config',
'ConditionList',
'coalesce_agent_status',
'describe_substrate',
'EnvJujuClient',
'EnvJujuClient1X',
'EnvJujuClient25',
'ensure_dir',
'get_cache_path',
'get_client_class',
'get_local_root',
'get_machine_dns_name',
'get_timeout_path',
'get_timeout_prefix',
'GroupReporter',
'IncompatibleConfigClass',
'InvalidEndpoint',
'jes_home_path',
'JESNotSupported',
'JujuData',
'JUJU_DEV_FEATURE_FLAGS',
'Juju2Backend',
'KILL_CONTROLLER',
'KVM_MACHINE',
'LXC_MACHINE',
'LXD_MACHINE',
'Machine',
'NameNotAccepted',
'NoProvider',
'parse_new_state_server_from_error',
'SimpleEnvironment',
'SoftDeadlineExceeded',
'Status',
'temp_bootstrap_env',
'_temp_env',
'temp_yaml_file',
'TypeNotAccepted',
'uniquify_local',
'until_timeout',
]
| <commit_before>from jujupy.client import *
from jujupy.client import _temp_env
__all__ = ['_temp_env']
<commit_msg>Switch to explicit imports for jujupy.<commit_after>from jujupy.client import (
AgentsNotStarted,
AuthNotAccepted,
AGENTS_READY,
client_from_config,
ConditionList,
coalesce_agent_status,
describe_substrate,
EnvJujuClient,
EnvJujuClient1X,
EnvJujuClient25,
ensure_dir,
get_cache_path,
get_client_class,
get_local_root,
get_machine_dns_name,
get_timeout_path,
get_timeout_prefix,
GroupReporter,
IncompatibleConfigClass,
InvalidEndpoint,
jes_home_path,
JESNotSupported,
JujuData,
JUJU_DEV_FEATURE_FLAGS,
Juju2Backend,
KILL_CONTROLLER,
KVM_MACHINE,
LXC_MACHINE,
LXD_MACHINE,
Machine,
NameNotAccepted,
NoProvider,
parse_new_state_server_from_error,
SimpleEnvironment,
SoftDeadlineExceeded,
Status,
temp_bootstrap_env,
_temp_env,
temp_yaml_file,
TypeNotAccepted,
uniquify_local,
until_timeout,
)
__all__ = [
'AgentsNotStarted',
'AuthNotAccepted',
'AGENTS_READY',
'client_from_config',
'ConditionList',
'coalesce_agent_status',
'describe_substrate',
'EnvJujuClient',
'EnvJujuClient1X',
'EnvJujuClient25',
'ensure_dir',
'get_cache_path',
'get_client_class',
'get_local_root',
'get_machine_dns_name',
'get_timeout_path',
'get_timeout_prefix',
'GroupReporter',
'IncompatibleConfigClass',
'InvalidEndpoint',
'jes_home_path',
'JESNotSupported',
'JujuData',
'JUJU_DEV_FEATURE_FLAGS',
'Juju2Backend',
'KILL_CONTROLLER',
'KVM_MACHINE',
'LXC_MACHINE',
'LXD_MACHINE',
'Machine',
'NameNotAccepted',
'NoProvider',
'parse_new_state_server_from_error',
'SimpleEnvironment',
'SoftDeadlineExceeded',
'Status',
'temp_bootstrap_env',
'_temp_env',
'temp_yaml_file',
'TypeNotAccepted',
'uniquify_local',
'until_timeout',
]
|
55f0ab3ba544344bda4c95b014193dffe9db70cd | examples/99bottles.py | examples/99bottles.py | #!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
from circuits import Debugger
(Tail(sys.argv[1]) + Debugger() + Grep(sys.argv[2])).run()
| #!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
app = Tail(sys.argv[1])
Grep(sys.argv[2]).register(app)
app.run()
| Use explicit API(s) for examples | Use explicit API(s) for examples
| Python | mit | nizox/circuits,eriol/circuits,treemo/circuits,treemo/circuits,treemo/circuits,eriol/circuits,eriol/circuits | #!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
from circuits import Debugger
(Tail(sys.argv[1]) + Debugger() + Grep(sys.argv[2])).run()
Use explicit API(s) for examples | #!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
app = Tail(sys.argv[1])
Grep(sys.argv[2]).register(app)
app.run()
| <commit_before>#!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
from circuits import Debugger
(Tail(sys.argv[1]) + Debugger() + Grep(sys.argv[2])).run()
<commit_msg>Use explicit API(s) for examples<commit_after> | #!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
app = Tail(sys.argv[1])
Grep(sys.argv[2]).register(app)
app.run()
| #!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
from circuits import Debugger
(Tail(sys.argv[1]) + Debugger() + Grep(sys.argv[2])).run()
Use explicit API(s) for examples#!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
app = Tail(sys.argv[1])
Grep(sys.argv[2]).register(app)
app.run()
| <commit_before>#!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
from circuits import Debugger
(Tail(sys.argv[1]) + Debugger() + Grep(sys.argv[2])).run()
<commit_msg>Use explicit API(s) for examples<commit_after>#!/usr/bin/env python
# XXX: Broken - Does O_NONBLOCK work at all?
"""An implementation of the Python Concurrency Problem of 99 Bottles of Beer
See: http://wiki.python.org/moin/Concurrency/99Bottles
"""
import sys
from circuits.io import File
from circuits import Component
from circuits.net.protocols import LP
class Tail(Component):
"""A complex component which combines the ``File`` and ``LP``
(Line Protoco) components together to implement similar functionality to
the UNIX ``tail`` command.
"""
def init(self, filename):
"""Initialize the Component.
NB: This is automatically called after ``__new__`` and ``__init__``.
"""
(File(filename, "r") + LP()).register(self)
class Grep(Component):
"""A simple component that simply listens for ``line`` events from the
``Tail`` component and performs a regular expression match against each
line. If the line matches it is printed to standard output.
"""
def init(self, pattern):
self.pattern = pattern
def line(self, line):
"""Line Event Handler"""
if self.pattern in line:
print(line)
# Configure and "run" the System.
app = Tail(sys.argv[1])
Grep(sys.argv[2]).register(app)
app.run()
|
497451ee060b1dd7655ad648f4026633e6e3318c | leaflets/models.py | leaflets/models.py | from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
def __unicode__(self):
return u"Leaflet for " + self.competition.__unicode__()
class Meta:
ordering = ['competition', 'year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
# Register to the admin site
admin.site.register(Leaflet)
| import reversion
from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
def generate_name(self, filename):
return "leaflets/{competition}-{year}-{issue}.pdf"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
def __unicode__(self):
return "{competition}-{year}-{issue}"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
leaflet = models.FileField(upload_to=generate_name)
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
class Meta:
ordering = ['competition', '-year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
class LeafletAdmin(reversion.VersionAdmin):
list_display = ('competition',
'year',
'issue',
)
# Register to the admin site
admin.site.register(Leaflet, LeafletAdmin)
| Add capability to upload Leaflets | Add capability to upload Leaflets
| Python | mit | matus-stehlik/roots,matus-stehlik/roots,matus-stehlik/glowing-batman,rtrembecky/roots,tbabej/roots,matus-stehlik/roots,tbabej/roots,tbabej/roots,rtrembecky/roots,matus-stehlik/glowing-batman,rtrembecky/roots | from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
def __unicode__(self):
return u"Leaflet for " + self.competition.__unicode__()
class Meta:
ordering = ['competition', 'year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
# Register to the admin site
admin.site.register(Leaflet)
Add capability to upload Leaflets | import reversion
from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
def generate_name(self, filename):
return "leaflets/{competition}-{year}-{issue}.pdf"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
def __unicode__(self):
return "{competition}-{year}-{issue}"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
leaflet = models.FileField(upload_to=generate_name)
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
class Meta:
ordering = ['competition', '-year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
class LeafletAdmin(reversion.VersionAdmin):
list_display = ('competition',
'year',
'issue',
)
# Register to the admin site
admin.site.register(Leaflet, LeafletAdmin)
| <commit_before>from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
def __unicode__(self):
return u"Leaflet for " + self.competition.__unicode__()
class Meta:
ordering = ['competition', 'year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
# Register to the admin site
admin.site.register(Leaflet)
<commit_msg>Add capability to upload Leaflets<commit_after> | import reversion
from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
def generate_name(self, filename):
return "leaflets/{competition}-{year}-{issue}.pdf"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
def __unicode__(self):
return "{competition}-{year}-{issue}"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
leaflet = models.FileField(upload_to=generate_name)
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
class Meta:
ordering = ['competition', '-year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
class LeafletAdmin(reversion.VersionAdmin):
list_display = ('competition',
'year',
'issue',
)
# Register to the admin site
admin.site.register(Leaflet, LeafletAdmin)
| from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
def __unicode__(self):
return u"Leaflet for " + self.competition.__unicode__()
class Meta:
ordering = ['competition', 'year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
# Register to the admin site
admin.site.register(Leaflet)
Add capability to upload Leafletsimport reversion
from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
def generate_name(self, filename):
return "leaflets/{competition}-{year}-{issue}.pdf"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
def __unicode__(self):
return "{competition}-{year}-{issue}"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
leaflet = models.FileField(upload_to=generate_name)
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
class Meta:
ordering = ['competition', '-year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
class LeafletAdmin(reversion.VersionAdmin):
list_display = ('competition',
'year',
'issue',
)
# Register to the admin site
admin.site.register(Leaflet, LeafletAdmin)
| <commit_before>from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
def __unicode__(self):
return u"Leaflet for " + self.competition.__unicode__()
class Meta:
ordering = ['competition', 'year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
# Register to the admin site
admin.site.register(Leaflet)
<commit_msg>Add capability to upload Leaflets<commit_after>import reversion
from django.db import models
from django.contrib import admin
class Leaflet(models.Model):
'''
Represents a given (generated) leaflet.
'''
def generate_name(self, filename):
return "leaflets/{competition}-{year}-{issue}.pdf"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
def __unicode__(self):
return "{competition}-{year}-{issue}"\
.format(competition=self.competition,
year=self.year,
issue=self.issue)
competition = models.ForeignKey('competitions.Competition')
year = models.IntegerField()
issue = models.IntegerField()
leaflet = models.FileField(upload_to=generate_name)
# Fields added via foreign keys:
# problemset_set
# TODO: more than one problemset can point to given leaflet, is that a
# problem?
class Meta:
ordering = ['competition', '-year', 'issue']
verbose_name = 'Leaflet'
verbose_name_plural = 'Leaflets'
class LeafletAdmin(reversion.VersionAdmin):
list_display = ('competition',
'year',
'issue',
)
# Register to the admin site
admin.site.register(Leaflet, LeafletAdmin)
|
3f394b37174d97b53fdef8ce662e258c6b2aa337 | src/appleseed.python/studio/__init__.py | src/appleseed.python/studio/__init__.py |
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = "PyQt4"
from _appleseedstudio import *
|
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = os.pathsep.join(["PySide", "PyQt4"])
from _appleseedstudio import *
| Add PySide to as.studio init preferred binding | Add PySide to as.studio init preferred binding
| Python | mit | luisbarrancos/appleseed,est77/appleseed,appleseedhq/appleseed,pjessesco/appleseed,gospodnetic/appleseed,Vertexwahn/appleseed,appleseedhq/appleseed,dictoon/appleseed,Aakash1312/appleseed,Biart95/appleseed,Vertexwahn/appleseed,dictoon/appleseed,Biart95/appleseed,aytekaman/appleseed,pjessesco/appleseed,aytekaman/appleseed,est77/appleseed,Biart95/appleseed,pjessesco/appleseed,Biart95/appleseed,gospodnetic/appleseed,dictoon/appleseed,pjessesco/appleseed,luisbarrancos/appleseed,Vertexwahn/appleseed,dictoon/appleseed,luisbarrancos/appleseed,appleseedhq/appleseed,Biart95/appleseed,luisbarrancos/appleseed,est77/appleseed,luisbarrancos/appleseed,pjessesco/appleseed,Aakash1312/appleseed,aytekaman/appleseed,gospodnetic/appleseed,Vertexwahn/appleseed,gospodnetic/appleseed,Aakash1312/appleseed,Aakash1312/appleseed,est77/appleseed,appleseedhq/appleseed,est77/appleseed,Vertexwahn/appleseed,aytekaman/appleseed,Aakash1312/appleseed,appleseedhq/appleseed,aytekaman/appleseed,gospodnetic/appleseed,dictoon/appleseed |
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = "PyQt4"
from _appleseedstudio import *
Add PySide to as.studio init preferred binding |
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = os.pathsep.join(["PySide", "PyQt4"])
from _appleseedstudio import *
| <commit_before>
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = "PyQt4"
from _appleseedstudio import *
<commit_msg>Add PySide to as.studio init preferred binding<commit_after> |
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = os.pathsep.join(["PySide", "PyQt4"])
from _appleseedstudio import *
|
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = "PyQt4"
from _appleseedstudio import *
Add PySide to as.studio init preferred binding
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = os.pathsep.join(["PySide", "PyQt4"])
from _appleseedstudio import *
| <commit_before>
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = "PyQt4"
from _appleseedstudio import *
<commit_msg>Add PySide to as.studio init preferred binding<commit_after>
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2017 Gleb Mishchenko, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
# Prevent Qt.py importing PySide2 and PyQt5.
if not os.getenv("QT_PREFERRED_BINDING"):
os.environ["QT_PREFERRED_BINDING"] = os.pathsep.join(["PySide", "PyQt4"])
from _appleseedstudio import *
|
4f9e602bfbf145adfc93270d915325b59c710a46 | conman/routes/migrations/0001_initial.py | conman/routes/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(help_text='\n Used to create the location of the Route. The Root Route needs\n "slug" to be blank; all other Routes need a value unique to the parent.\n It can only contain letters, numbers, underscores, or hyphens.\n ', default='', max_length=255)),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(max_length=255, default='', help_text='The url fragment at this point in the Route hierarchy.')),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
| Update new migration to match existing docstring | Update new migration to match existing docstring
| Python | bsd-2-clause | Ian-Foote/django-conman,meshy/django-conman,meshy/django-conman | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(help_text='\n Used to create the location of the Route. The Root Route needs\n "slug" to be blank; all other Routes need a value unique to the parent.\n It can only contain letters, numbers, underscores, or hyphens.\n ', default='', max_length=255)),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
Update new migration to match existing docstring | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(max_length=255, default='', help_text='The url fragment at this point in the Route hierarchy.')),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(help_text='\n Used to create the location of the Route. The Root Route needs\n "slug" to be blank; all other Routes need a value unique to the parent.\n It can only contain letters, numbers, underscores, or hyphens.\n ', default='', max_length=255)),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
<commit_msg>Update new migration to match existing docstring<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(max_length=255, default='', help_text='The url fragment at this point in the Route hierarchy.')),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(help_text='\n Used to create the location of the Route. The Root Route needs\n "slug" to be blank; all other Routes need a value unique to the parent.\n It can only contain letters, numbers, underscores, or hyphens.\n ', default='', max_length=255)),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
Update new migration to match existing docstring# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(max_length=255, default='', help_text='The url fragment at this point in the Route hierarchy.')),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(help_text='\n Used to create the location of the Route. The Root Route needs\n "slug" to be blank; all other Routes need a value unique to the parent.\n It can only contain letters, numbers, underscores, or hyphens.\n ', default='', max_length=255)),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
<commit_msg>Update new migration to match existing docstring<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Route',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('slug', models.SlugField(max_length=255, default='', help_text='The url fragment at this point in the Route hierarchy.')),
('url', models.TextField(unique=True, editable=False, db_index=True)),
('parent', models.ForeignKey(blank=True, null=True, related_name='children', to='routes.Route')),
('polymorphic_ctype', models.ForeignKey(null=True, related_name='polymorphic_routes.route_set+', editable=False, to='contenttypes.ContentType')),
],
),
migrations.AlterUniqueTogether(
name='route',
unique_together=set([('parent', 'slug')]),
),
]
|
2ac7b22e592557ea8be70311e641b1f42f6c7128 | tests/settings.py | tests/settings.py | import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
SECRET_KEY = 'test'
| import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
SECRET_KEY = 'test'
| Use md5 password hasher for tests | Use md5 password hasher for tests
| Python | bsd-2-clause | incuna/incuna-test-utils,incuna/incuna-test-utils | import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
SECRET_KEY = 'test'
Use md5 password hasher for tests | import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
SECRET_KEY = 'test'
| <commit_before>import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
SECRET_KEY = 'test'
<commit_msg>Use md5 password hasher for tests<commit_after> | import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
SECRET_KEY = 'test'
| import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
SECRET_KEY = 'test'
Use md5 password hasher for testsimport dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
SECRET_KEY = 'test'
| <commit_before>import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
SECRET_KEY = 'test'
<commit_msg>Use md5 password hasher for tests<commit_after>import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
SECRET_KEY = 'test'
|
d9d2c7d341894e28a5ad73469ec0d9d23d78429e | vispy/visuals/graphs/layouts/__init__.py | vispy/visuals/graphs/layouts/__init__.py | from .random import random # noqa
from .circular import circular # noqa
| import inspect
from .random import random
from .circular import circular
from .force_directed import fruchterman_reingold
_layout_map = {
'random': random,
'circular': circular,
'force_directed': fruchterman_reingold
}
def get(name, *args, **kwargs):
if name not in _layout_map:
raise KeyError("Graph layout '{}' not found.".format(name))
layout = _layout_map[name]
if inspect.isclass(layout):
layout = layout(*args, **kwargs)
return layout
| Add new way of retreiving graph layouts | Add new way of retreiving graph layouts
| Python | bsd-3-clause | ghisvail/vispy,michaelaye/vispy,Eric89GXL/vispy,michaelaye/vispy,Eric89GXL/vispy,Eric89GXL/vispy,drufat/vispy,drufat/vispy,ghisvail/vispy,michaelaye/vispy,drufat/vispy,ghisvail/vispy | from .random import random # noqa
from .circular import circular # noqa
Add new way of retreiving graph layouts | import inspect
from .random import random
from .circular import circular
from .force_directed import fruchterman_reingold
_layout_map = {
'random': random,
'circular': circular,
'force_directed': fruchterman_reingold
}
def get(name, *args, **kwargs):
if name not in _layout_map:
raise KeyError("Graph layout '{}' not found.".format(name))
layout = _layout_map[name]
if inspect.isclass(layout):
layout = layout(*args, **kwargs)
return layout
| <commit_before>from .random import random # noqa
from .circular import circular # noqa
<commit_msg>Add new way of retreiving graph layouts<commit_after> | import inspect
from .random import random
from .circular import circular
from .force_directed import fruchterman_reingold
_layout_map = {
'random': random,
'circular': circular,
'force_directed': fruchterman_reingold
}
def get(name, *args, **kwargs):
if name not in _layout_map:
raise KeyError("Graph layout '{}' not found.".format(name))
layout = _layout_map[name]
if inspect.isclass(layout):
layout = layout(*args, **kwargs)
return layout
| from .random import random # noqa
from .circular import circular # noqa
Add new way of retreiving graph layoutsimport inspect
from .random import random
from .circular import circular
from .force_directed import fruchterman_reingold
_layout_map = {
'random': random,
'circular': circular,
'force_directed': fruchterman_reingold
}
def get(name, *args, **kwargs):
if name not in _layout_map:
raise KeyError("Graph layout '{}' not found.".format(name))
layout = _layout_map[name]
if inspect.isclass(layout):
layout = layout(*args, **kwargs)
return layout
| <commit_before>from .random import random # noqa
from .circular import circular # noqa
<commit_msg>Add new way of retreiving graph layouts<commit_after>import inspect
from .random import random
from .circular import circular
from .force_directed import fruchterman_reingold
_layout_map = {
'random': random,
'circular': circular,
'force_directed': fruchterman_reingold
}
def get(name, *args, **kwargs):
if name not in _layout_map:
raise KeyError("Graph layout '{}' not found.".format(name))
layout = _layout_map[name]
if inspect.isclass(layout):
layout = layout(*args, **kwargs)
return layout
|
739ae88d817cb86723b126360aaf3dd6df3045c0 | tests/test_log.py | tests/test_log.py | import json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
| import json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values, _trim_message
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_message("foo" * 100)
assert "..." in message
| Add coverage to some of log.py | Add coverage to some of log.py
| Python | mit | bcb/jsonrpcclient | import json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
Add coverage to some of log.py | import json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values, _trim_message
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_message("foo" * 100)
assert "..." in message
| <commit_before>import json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
<commit_msg>Add coverage to some of log.py<commit_after> | import json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values, _trim_message
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_message("foo" * 100)
assert "..." in message
| import json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
Add coverage to some of log.pyimport json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values, _trim_message
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_message("foo" * 100)
assert "..." in message
| <commit_before>import json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
<commit_msg>Add coverage to some of log.py<commit_after>import json
import logging
from unittest.mock import Mock, patch
from jsonrpcclient.log import _trim_string, _trim_values, _trim_message
def test_trim_string():
message = _trim_string("foo" * 100)
assert "..." in message
def test_trim_values():
message = _trim_values({"list": [0] * 100})
assert "..." in message["list"]
def test_trim_values_nested():
message = _trim_values({"obj": {"obj2": {"string2": "foo" * 100}}})
assert "..." in message["obj"]["obj2"]["string2"]
def test_trim_values_batch():
message = _trim_values([{"list": [0] * 100}])
assert "..." in message[0]["list"]
def test_trim_message():
message = _trim_message("foo" * 100)
assert "..." in message
|
1927c503fda892490fb7262ba480e429a0f416fb | intermol/orderedset.py | intermol/orderedset.py | import collections
from copy import deepcopy
class OrderedSet(collections.Set):
def __init__(self, iterable=()):
self.d = collections.OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, collections.Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
| from collections.abc import Set
from collections import OrderedDict
from copy import deepcopy
class OrderedSet(Set):
def __init__(self, iterable=()):
self.d = OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
| Update collections imports for deprecations | Update collections imports for deprecations
| Python | mit | shirtsgroup/InterMol,shirtsgroup/InterMol | import collections
from copy import deepcopy
class OrderedSet(collections.Set):
def __init__(self, iterable=()):
self.d = collections.OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, collections.Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
Update collections imports for deprecations | from collections.abc import Set
from collections import OrderedDict
from copy import deepcopy
class OrderedSet(Set):
def __init__(self, iterable=()):
self.d = OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
| <commit_before>import collections
from copy import deepcopy
class OrderedSet(collections.Set):
def __init__(self, iterable=()):
self.d = collections.OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, collections.Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
<commit_msg>Update collections imports for deprecations<commit_after> | from collections.abc import Set
from collections import OrderedDict
from copy import deepcopy
class OrderedSet(Set):
def __init__(self, iterable=()):
self.d = OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
| import collections
from copy import deepcopy
class OrderedSet(collections.Set):
def __init__(self, iterable=()):
self.d = collections.OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, collections.Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
Update collections imports for deprecationsfrom collections.abc import Set
from collections import OrderedDict
from copy import deepcopy
class OrderedSet(Set):
def __init__(self, iterable=()):
self.d = OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
| <commit_before>import collections
from copy import deepcopy
class OrderedSet(collections.Set):
def __init__(self, iterable=()):
self.d = collections.OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, collections.Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
<commit_msg>Update collections imports for deprecations<commit_after>from collections.abc import Set
from collections import OrderedDict
from copy import deepcopy
class OrderedSet(Set):
def __init__(self, iterable=()):
self.d = OrderedDict.fromkeys(iterable)
def add(self, key):
self.d[key] = None
def discard(self, key):
del self.d[key]
def difference_update(self, *args, **kwargs):
intersection = set(self.d.keys()).intersection(args[0])
self.intersection_update(intersection)
def intersection_update(self, *args, **kwargs):
for part in args[0]:
del self.d[part]
def __len__(self):
return len(self.d)
def __contains__(self, element):
return element in self.d
def __iter__(self):
return self.d.__iter__()
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for e1, e2 in zip(self, other):
if e1 != e2:
return False
return True
def __repr__(self):
class_name = self.__class__.__name__
if not self:
return '{0!s}()'.format(class_name)
return '{0!s}({1!r})'.format(class_name, list(self))
def __deepcopy__(self, memo):
result = OrderedSet()
for elt in self:
result.add(deepcopy(elt,memo))
return result
|
3bd409a0c7f252811c7e8488493270d225e8616a | src/main/python/piglatin.py | src/main/python/piglatin.py | import sys
def parseCommandLine(argv):
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
| import sys
def parseCommandLine(argv):
print 'Inside parser'
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
| Test case: failing print for python3 | Test case: failing print for python3
| Python | mit | oneyoke/sw_asgmt_2 | import sys
def parseCommandLine(argv):
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
Test case: failing print for python3 | import sys
def parseCommandLine(argv):
print 'Inside parser'
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
| <commit_before>import sys
def parseCommandLine(argv):
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
<commit_msg>Test case: failing print for python3<commit_after> | import sys
def parseCommandLine(argv):
print 'Inside parser'
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
| import sys
def parseCommandLine(argv):
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
Test case: failing print for python3import sys
def parseCommandLine(argv):
print 'Inside parser'
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
| <commit_before>import sys
def parseCommandLine(argv):
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
<commit_msg>Test case: failing print for python3<commit_after>import sys
def parseCommandLine(argv):
print 'Inside parser'
return argv[1] if len(argv) > 1 else ""
if __name__ == "__main__":
latin = parseCommandLine(sys.argv)
print(latin)
print("igpay atinlay")
|
5185222f82b22868a28cb93a6a11632a9e1f6456 | libdotify/utils.py | libdotify/utils.py | import os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if "\.git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
os.makedirs(dst)
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| import os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if ".git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
print("path: {} ({})".format(path, name))
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
try:
os.makedirs(os.path.split(dst)[0])
except FileExistsError:
pass
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| Fix path creation/git detection bugs | [libdotify] Fix path creation/git detection bugs
| Python | mit | tobi-wan-kenobi/dotfiles | import os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if "\.git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
os.makedirs(dst)
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
[libdotify] Fix path creation/git detection bugs | import os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if ".git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
print("path: {} ({})".format(path, name))
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
try:
os.makedirs(os.path.split(dst)[0])
except FileExistsError:
pass
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| <commit_before>import os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if "\.git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
os.makedirs(dst)
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
<commit_msg>[libdotify] Fix path creation/git detection bugs<commit_after> | import os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if ".git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
print("path: {} ({})".format(path, name))
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
try:
os.makedirs(os.path.split(dst)[0])
except FileExistsError:
pass
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| import os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if "\.git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
os.makedirs(dst)
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
[libdotify] Fix path creation/git detection bugsimport os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if ".git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
print("path: {} ({})".format(path, name))
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
try:
os.makedirs(os.path.split(dst)[0])
except FileExistsError:
pass
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| <commit_before>import os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if "\.git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
os.makedirs(dst)
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
<commit_msg>[libdotify] Fix path creation/git detection bugs<commit_after>import os
import shlex
import logging
import subprocess
import contextlib
@contextlib.contextmanager
def cd(newdir):
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
def execute(cmd):
args = shlex.split(cmd)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = p.communicate()
for line in out:
if not line: continue
logging.info(line.rstrip())
if p.returncode != 0:
logging.error("{} failed with {}".format(cmd, p.returncode))
def git_update(dotmodule, dst, plugins):
for plugin in plugins:
if ".git" in plugin:
name = os.path.basename(plugin).rpartition(".git")[0] # replace trailing ".git"
else:
name = os.path.basename(plugin)
path = "{}{}".format(dst, name)
print("path: {} ({})".format(path, name))
if os.path.exists(path):
logging.debug("updating {} plugin {}".format(dotmodule, name))
with cd(path):
execute("git pull")
else:
logging.debug("cloning {} plugin {}".format(dotmodule, name))
try:
os.makedirs(os.path.split(dst)[0])
except FileExistsError:
pass
with cd(dst):
execute("git clone --recursive {}".format(plugin))
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
a09edcdf11c0d6c6b43cbff5029ac8cfb5741170 | application.py | application.py | #!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| Update to run on port 5000 | Update to run on port 5000
For development we will want to run multiple apps, so they should each bind to a different port number.
The default port is 5000 anyway, but we should state the port explicitly in the code which is why I've added it here.
| Python | mit | mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api | #!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
Update to run on port 5000
For development we will want to run multiple apps, so they should each bind to a different port number.
The default port is 5000 anyway, but we should state the port explicitly in the code which is why I've added it here. | #!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| <commit_before>#!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
<commit_msg>Update to run on port 5000
For development we will want to run multiple apps, so they should each bind to a different port number.
The default port is 5000 anyway, but we should state the port explicitly in the code which is why I've added it here.<commit_after> | #!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
Update to run on port 5000
For development we will want to run multiple apps, so they should each bind to a different port number.
The default port is 5000 anyway, but we should state the port explicitly in the code which is why I've added it here.#!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| <commit_before>#!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
<commit_msg>Update to run on port 5000
For development we will want to run multiple apps, so they should each bind to a different port number.
The default port is 5000 anyway, but we should state the port explicitly in the code which is why I've added it here.<commit_after>#!/usr/bin/env python
import os
from app import create_app, db
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
application = create_app(os.getenv('FLASH_CONFIG') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
9cdf014f2349b7c39974ccfc5e873dbcb8e9cee9 | application.py | application.py |
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
| Make it easier to run | Make it easier to run
| Python | mpl-2.0 | radremedy/radremedy,radioprotector/radremedy,radioprotector/radremedy,AllieDeford/radremedy,AllieDeford/radremedy,radremedy/radremedy,radioprotector/radremedy,AllieDeford/radremedy,radremedy/radremedy,radremedy/radremedy,radioprotector/radremedy |
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
Make it easier to run | #!/usr/bin/env python
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
| <commit_before>
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
<commit_msg>Make it easier to run<commit_after> | #!/usr/bin/env python
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
|
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
Make it easier to run#!/usr/bin/env python
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
| <commit_before>
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
<commit_msg>Make it easier to run<commit_after>#!/usr/bin/env python
from remedy.radremedy import create_app
application, manager = create_app('remedy.config.ProductionConfig')
application.debug = True
if __name__ == '__main__':
manager.run()
|
d2f1b9311b546c079490e5f0bdb45b9c9d570bb1 | system/test_coupling_fields.py | system/test_coupling_fields.py |
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_fields = os.path.join(self.paths['cm_1440x1080-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(self.paths['cm_360x300-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
|
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_paths = self.make_paths('cm_1440x1080-test')
lo_paths = self.make_paths('cm_360x300-test')
hi_fields = os.path.join(hi_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(lo_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
| Fix up paths in system test. | Fix up paths in system test.
| Python | apache-2.0 | CWSL/access-om |
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_fields = os.path.join(self.paths['cm_1440x1080-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(self.paths['cm_360x300-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
Fix up paths in system test. |
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_paths = self.make_paths('cm_1440x1080-test')
lo_paths = self.make_paths('cm_360x300-test')
hi_fields = os.path.join(hi_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(lo_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
| <commit_before>
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_fields = os.path.join(self.paths['cm_1440x1080-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(self.paths['cm_360x300-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
<commit_msg>Fix up paths in system test.<commit_after> |
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_paths = self.make_paths('cm_1440x1080-test')
lo_paths = self.make_paths('cm_360x300-test')
hi_fields = os.path.join(hi_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(lo_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
|
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_fields = os.path.join(self.paths['cm_1440x1080-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(self.paths['cm_360x300-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
Fix up paths in system test.
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_paths = self.make_paths('cm_1440x1080-test')
lo_paths = self.make_paths('cm_360x300-test')
hi_fields = os.path.join(hi_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(lo_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
| <commit_before>
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_fields = os.path.join(self.paths['cm_1440x1080-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(self.paths['cm_360x300-test']['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
<commit_msg>Fix up paths in system test.<commit_after>
from __future__ import print_function
import os
import netCDF4 as nc
from model_test_helper import ModelTestHelper
class TestCouplingFields(ModelTestHelper):
def __init__(self):
super(TestCouplingFields, self).__init__()
def test_swflx(self):
"""
Compare short wave flux over a geographic area between low and hi res
models.
"""
hi_paths = self.make_paths('cm_1440x1080-test')
lo_paths = self.make_paths('cm_360x300-test')
hi_fields = os.path.join(hi_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
lo_fields = os.path.join(lo_paths['output'], 'ice',
'fields_a2i_in_ice.nc')
f_hi = nc.Dataset(hi_fields)
f_hi.close()
f_lo = nc.Dataset(lo_fields)
f_lo.close()
|
2d30393166e6c9bd0995810226a223b44635da6d | src/stratis_cli/_version.py | src/stratis_cli/_version.py | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
| # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 1, 0)
__version__ = ".".join(str(x) for x in __version_info__)
| Set stratis-cli version to 3.1.0 | Set stratis-cli version to 3.1.0
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com>
| Python | apache-2.0 | stratis-storage/stratis-cli,stratis-storage/stratis-cli | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
Set stratis-cli version to 3.1.0
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com> | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 1, 0)
__version__ = ".".join(str(x) for x in __version_info__)
| <commit_before># Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
<commit_msg>Set stratis-cli version to 3.1.0
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com><commit_after> | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 1, 0)
__version__ = ".".join(str(x) for x in __version_info__)
| # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
Set stratis-cli version to 3.1.0
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com># Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 1, 0)
__version__ = ".".join(str(x) for x in __version_info__)
| <commit_before># Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 0, 1)
__version__ = ".".join(str(x) for x in __version_info__)
<commit_msg>Set stratis-cli version to 3.1.0
Signed-off-by: mulhern <7b51bcf507bcd7afb72bf8663752c0ddbeb517f6@redhat.com><commit_after># Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version information.
.. moduleauthor:: mulhern <amulhern@redhat.com>
"""
__version_info__ = (3, 1, 0)
__version__ = ".".join(str(x) for x in __version_info__)
|
655c3ca55a5b3bb1f03d524219c3d038c2d02ed5 | st2client/st2client/models/datastore.py | st2client/st2client/models/datastore.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
@property
def id(self):
# Note: This is a temporary hack until we refactor client and make it support non id PKs
return self.name
| Update KeyValuePair model so delete and other operations which require "id" attribute to be present still work. | Update KeyValuePair model so delete and other operations which require "id"
attribute to be present still work.
| Python | apache-2.0 | tonybaloney/st2,alfasin/st2,nzlosh/st2,tonybaloney/st2,pixelrebel/st2,Plexxi/st2,punalpatel/st2,StackStorm/st2,Itxaka/st2,nzlosh/st2,pinterb/st2,StackStorm/st2,emedvedev/st2,alfasin/st2,jtopjian/st2,peak6/st2,peak6/st2,emedvedev/st2,tonybaloney/st2,Itxaka/st2,lakshmi-kannan/st2,Plexxi/st2,grengojbo/st2,dennybaa/st2,nzlosh/st2,pixelrebel/st2,dennybaa/st2,jtopjian/st2,punalpatel/st2,dennybaa/st2,StackStorm/st2,armab/st2,lakshmi-kannan/st2,grengojbo/st2,grengojbo/st2,nzlosh/st2,Itxaka/st2,lakshmi-kannan/st2,pinterb/st2,Plexxi/st2,Plexxi/st2,peak6/st2,armab/st2,punalpatel/st2,StackStorm/st2,emedvedev/st2,pixelrebel/st2,pinterb/st2,alfasin/st2,armab/st2,jtopjian/st2 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
Update KeyValuePair model so delete and other operations which require "id"
attribute to be present still work. | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
@property
def id(self):
# Note: This is a temporary hack until we refactor client and make it support non id PKs
return self.name
| <commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
<commit_msg>Update KeyValuePair model so delete and other operations which require "id"
attribute to be present still work.<commit_after> | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
@property
def id(self):
# Note: This is a temporary hack until we refactor client and make it support non id PKs
return self.name
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
Update KeyValuePair model so delete and other operations which require "id"
attribute to be present still work.# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
@property
def id(self):
# Note: This is a temporary hack until we refactor client and make it support non id PKs
return self.name
| <commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
<commit_msg>Update KeyValuePair model so delete and other operations which require "id"
attribute to be present still work.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class KeyValuePair(core.Resource):
_alias = 'Key'
_display_name = 'Key Value Pair'
_plural = 'Keys'
_plural_display_name = 'Key Value Pairs'
_repr_attributes = ['name', 'value']
@property
def id(self):
# Note: This is a temporary hack until we refactor client and make it support non id PKs
return self.name
|
24dba96be4a9202fdac5fa779c52789ec6cd13c4 | api/common/views.py | api/common/views.py | import subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
| import subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.run(['scripts/deploy.sh', commit],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
| Improve subprocess call during deployment | Improve subprocess call during deployment
| Python | apache-2.0 | prattl/teamfinder,prattl/teamfinder,prattl/teamfinder,prattl/teamfinder | import subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
Improve subprocess call during deployment | import subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.run(['scripts/deploy.sh', commit],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
| <commit_before>import subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
<commit_msg>Improve subprocess call during deployment<commit_after> | import subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.run(['scripts/deploy.sh', commit],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
| import subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
Improve subprocess call during deploymentimport subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.run(['scripts/deploy.sh', commit],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
| <commit_before>import subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.Popen(['scripts/deploy.sh', commit], stdout=subprocess.PIPE)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
<commit_msg>Improve subprocess call during deployment<commit_after>import subprocess
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import JsonResponse, HttpResponseBadRequest
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
@csrf_exempt
def deploy(request):
deploy_secret_key = request.POST.get('DEPLOY_SECRET_KEY')
# branch = request.POST.get('BRANCH')
commit = request.POST.get('COMMIT')
if deploy_secret_key != settings.SECRET_KEY:
return HttpResponseBadRequest('Incorrect key.')
subprocess.run(['scripts/deploy.sh', commit],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
return JsonResponse({'result': 'deploy started'})
def social_redirect(request):
token, _ = Token.objects.get_or_create(user=request.user)
return_url = '{protocol}://{domain}/finish-steam/{token}'.format(
protocol='http' if settings.DEBUG else 'https',
domain=Site.objects.get_current().domain,
token=token.key
)
return redirect(return_url)
|
6740f677903c7d48748fd0a595762b8bf2c7dcb3 | test_connector/components/components.py | test_connector/components/components.py | # -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
from odoo.addons.connector.components.collection import use
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
| # -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
| Improve on the collections, work, ... | Improve on the collections, work, ...
| Python | agpl-3.0 | OCA/connector,OCA/connector | # -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
from odoo.addons.connector.components.collection import use
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
Improve on the collections, work, ... | # -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
| <commit_before># -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
from odoo.addons.connector.components.collection import use
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
<commit_msg>Improve on the collections, work, ...<commit_after> | # -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
| # -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
from odoo.addons.connector.components.collection import use
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
Improve on the collections, work, ...# -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
| <commit_before># -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
from odoo.addons.connector.components.collection import use
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
<commit_msg>Improve on the collections, work, ...<commit_after># -*- coding: utf-8 -*-
# Copyright 2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
# TODO make API short paths
# from odoo.addons.connector import Component
from odoo.addons.connector.components.core import Component
class BaseComponent(Component):
_inherit = 'base'
def test_inherit_base(self):
return 'test_inherit_base'
class Mapper(Component):
_name = 'mapper'
def test_inherit_component(self):
return 'test_inherit_component'
class TestMapper(Component):
_name = 'test.mapper'
_inherit = 'mapper'
def name(self):
return 'test.mapper'
|
3bee320f66d192e2e40b6b91a53c3ccd64c09443 | test/MSVC/query_vcbat.py | test/MSVC/query_vcbat.py | import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
import os
env = Environment(tools = ['MSVCCommon'])
""")
test.run(stderr = None)
test.pass_test()
| import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
from SCons.Tool.MSVCCommon import FindMSVSBatFile, ParseBatFile, MergeMSVSBatFile, query_versions
#env = Environment(tools = ['mingw'])
DefaultEnvironment(tools = [])
#for v in [9, 8, 7.1, 7]:
# print " ==== Testing for version %s ==== " % str(v)
# bat = FindMSVSBatFile(v)
# print bat
# if bat:
# d = ParseBatFile(bat)
# for k, v in d.items():
# print k, v
#MergeMSVSBatFile(env, 9.0)
#print env['ENV']['PATH']
print query_versions()
""")
test.run(stderr = None)
test.pass_test()
| Update our fake test for debugging purpose. | Update our fake test for debugging purpose.
| Python | mit | azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons | import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
import os
env = Environment(tools = ['MSVCCommon'])
""")
test.run(stderr = None)
test.pass_test()
Update our fake test for debugging purpose. | import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
from SCons.Tool.MSVCCommon import FindMSVSBatFile, ParseBatFile, MergeMSVSBatFile, query_versions
#env = Environment(tools = ['mingw'])
DefaultEnvironment(tools = [])
#for v in [9, 8, 7.1, 7]:
# print " ==== Testing for version %s ==== " % str(v)
# bat = FindMSVSBatFile(v)
# print bat
# if bat:
# d = ParseBatFile(bat)
# for k, v in d.items():
# print k, v
#MergeMSVSBatFile(env, 9.0)
#print env['ENV']['PATH']
print query_versions()
""")
test.run(stderr = None)
test.pass_test()
| <commit_before>import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
import os
env = Environment(tools = ['MSVCCommon'])
""")
test.run(stderr = None)
test.pass_test()
<commit_msg>Update our fake test for debugging purpose.<commit_after> | import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
from SCons.Tool.MSVCCommon import FindMSVSBatFile, ParseBatFile, MergeMSVSBatFile, query_versions
#env = Environment(tools = ['mingw'])
DefaultEnvironment(tools = [])
#for v in [9, 8, 7.1, 7]:
# print " ==== Testing for version %s ==== " % str(v)
# bat = FindMSVSBatFile(v)
# print bat
# if bat:
# d = ParseBatFile(bat)
# for k, v in d.items():
# print k, v
#MergeMSVSBatFile(env, 9.0)
#print env['ENV']['PATH']
print query_versions()
""")
test.run(stderr = None)
test.pass_test()
| import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
import os
env = Environment(tools = ['MSVCCommon'])
""")
test.run(stderr = None)
test.pass_test()
Update our fake test for debugging purpose.import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
from SCons.Tool.MSVCCommon import FindMSVSBatFile, ParseBatFile, MergeMSVSBatFile, query_versions
#env = Environment(tools = ['mingw'])
DefaultEnvironment(tools = [])
#for v in [9, 8, 7.1, 7]:
# print " ==== Testing for version %s ==== " % str(v)
# bat = FindMSVSBatFile(v)
# print bat
# if bat:
# d = ParseBatFile(bat)
# for k, v in d.items():
# print k, v
#MergeMSVSBatFile(env, 9.0)
#print env['ENV']['PATH']
print query_versions()
""")
test.run(stderr = None)
test.pass_test()
| <commit_before>import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
import os
env = Environment(tools = ['MSVCCommon'])
""")
test.run(stderr = None)
test.pass_test()
<commit_msg>Update our fake test for debugging purpose.<commit_after>import sys
import TestSCons
test = TestSCons.TestSCons(match = TestSCons.match_re)
if sys.platform != 'win32':
msg = "Skipping Visual C/C++ test on non-Windows platform '%s'\n" % sys.platform
test.skip_test(msg)
#####
# Test the basics
test.write('SConstruct',"""
from SCons.Tool.MSVCCommon import FindMSVSBatFile, ParseBatFile, MergeMSVSBatFile, query_versions
#env = Environment(tools = ['mingw'])
DefaultEnvironment(tools = [])
#for v in [9, 8, 7.1, 7]:
# print " ==== Testing for version %s ==== " % str(v)
# bat = FindMSVSBatFile(v)
# print bat
# if bat:
# d = ParseBatFile(bat)
# for k, v in d.items():
# print k, v
#MergeMSVSBatFile(env, 9.0)
#print env['ENV']['PATH']
print query_versions()
""")
test.run(stderr = None)
test.pass_test()
|
d58eb0f244d461222d5cd0719675cf46a9297081 | app/influx/views.py | app/influx/views.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
endpoint = 'influxdb'
url_prefix = '/influxdb'
url_rules = {
'index': {
'rule': '/',
}
}
def get(self):
config = current_app.config['USER_CONFIG'].get('influxdb', {})
client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
results = []
for metric in config.get('metrics', []):
collection = []
for index, query in enumerate(metric.get('queries', [])):
result = client.query(query, epoch='ms')
if result:
for dataset in result.raw['series']:
series = {}
series['data'] = dataset['values']
series['label'] = metric['labels'][index] if 'labels' in metric else None
series['lines'] = dict(fill=True)
series['mode'] = metric['mode'] if 'mode' in metric else None
series['color'] = metric['colors'][index] if 'colors' in metric else None
collection.append(series)
results.append(collection)
return jsonify(results=results)
| #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
endpoint = 'influxdb'
url_prefix = '/influxdb'
url_rules = {
'index': {
'rule': '/',
}
}
def get(self):
config = current_app.config['USER_CONFIG'].get('influxdb', {})
client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
results = []
for metric in config.get('metrics', []):
collection = []
for index, query in enumerate(metric.get('queries', [])):
result = client.query(query, epoch='ms')
if result:
for dataset in result.raw['series']:
series = {}
series['data'] = dataset['values']
series['label'] = metric['labels'][index] if 'labels' in metric else None
series['lines'] = dict(fill=True)
series['mode'] = metric['mode'] if 'mode' in metric else None
series['color'] = metric['colors'][index] if 'colors' in metric else None
collection.append(series)
results.append(collection)
return jsonify(results=results)
| Stop watching movies while editing code! | Stop watching movies while editing code! | Python | bsd-2-clause | Crapworks/ceph-dash,Crapworks/ceph-dash,Aorjoa/aiyara-ceph-dash,Crapworks/ceph-dash,Aorjoa/aiyara-ceph-dash,Aorjoa/aiyara-ceph-dash,Aorjoa/aiyara-ceph-dash,Crapworks/ceph-dash,Aorjoa/aiyara-ceph-dash | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
endpoint = 'influxdb'
url_prefix = '/influxdb'
url_rules = {
'index': {
'rule': '/',
}
}
def get(self):
config = current_app.config['USER_CONFIG'].get('influxdb', {})
client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
results = []
for metric in config.get('metrics', []):
collection = []
for index, query in enumerate(metric.get('queries', [])):
result = client.query(query, epoch='ms')
if result:
for dataset in result.raw['series']:
series = {}
series['data'] = dataset['values']
series['label'] = metric['labels'][index] if 'labels' in metric else None
series['lines'] = dict(fill=True)
series['mode'] = metric['mode'] if 'mode' in metric else None
series['color'] = metric['colors'][index] if 'colors' in metric else None
collection.append(series)
results.append(collection)
return jsonify(results=results)
Stop watching movies while editing code! | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
    """Flask API resource serving configured InfluxDB queries as chart JSON."""
    endpoint = 'influxdb'
    url_prefix = '/influxdb'
    url_rules = {
        'index': {
            'rule': '/',
        }
    }
    def get(self):
        """Run every configured metric query and return the series data.

        Response shape: {"results": [[series, ...], ...]}, one inner list per
        entry in the user config's influxdb.metrics list.
        """
        # 'influxdb' section of the user-supplied config; {} when absent
        # (config['uri'] below still requires the section to exist).
        config = current_app.config['USER_CONFIG'].get('influxdb', {})
        # Fresh client per request, built from the configured DSN string.
        client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
        results = []
        for metric in config.get('metrics', []):
            collection = []
            for index, query in enumerate(metric.get('queries', [])):
                # epoch='ms' asks InfluxDB for millisecond timestamps.
                result = client.query(query, epoch='ms')
                if result:
                    for dataset in result.raw['series']:
                        series = {}
                        series['data'] = dataset['values']
                        # 'labels'/'colors' are per-query lists; 'mode' is a
                        # single value shared by all queries of this metric.
                        series['label'] = metric['labels'][index] if 'labels' in metric else None
                        series['lines'] = dict(fill=True)
                        series['mode'] = metric['mode'] if 'mode' in metric else None
                        series['color'] = metric['colors'][index] if 'colors' in metric else None
                        collection.append(series)
            results.append(collection)
        return jsonify(results=results)
| <commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
    """Flask API resource serving configured InfluxDB queries as chart JSON."""
    endpoint = 'influxdb'
    url_prefix = '/influxdb'
    url_rules = {
        'index': {
            'rule': '/',
        }
    }
    def get(self):
        """Run every configured metric query and return the series data.

        Response shape: {"results": [[series, ...], ...]}, one inner list per
        entry in the user config's influxdb.metrics list.
        """
        # 'influxdb' section of the user-supplied config; {} when absent
        # (config['uri'] below still requires the section to exist).
        config = current_app.config['USER_CONFIG'].get('influxdb', {})
        # Fresh client per request, built from the configured DSN string.
        client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
        results = []
        for metric in config.get('metrics', []):
            collection = []
            for index, query in enumerate(metric.get('queries', [])):
                # epoch='ms' asks InfluxDB for millisecond timestamps.
                result = client.query(query, epoch='ms')
                if result:
                    for dataset in result.raw['series']:
                        series = {}
                        series['data'] = dataset['values']
                        # 'labels'/'colors' are per-query lists; 'mode' is a
                        # single value shared by all queries of this metric.
                        series['label'] = metric['labels'][index] if 'labels' in metric else None
                        series['lines'] = dict(fill=True)
                        series['mode'] = metric['mode'] if 'mode' in metric else None
                        series['color'] = metric['colors'][index] if 'colors' in metric else None
                        collection.append(series)
            results.append(collection)
        return jsonify(results=results)
<commit_msg>Stop watching movies while editing code!<commit_after> | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
    """Flask API resource serving configured InfluxDB queries as chart JSON."""
    endpoint = 'influxdb'
    url_prefix = '/influxdb'
    url_rules = {
        'index': {
            'rule': '/',
        }
    }
    def get(self):
        """Run every configured metric query and return the series data.

        Response shape: {"results": [[series, ...], ...]}, one inner list per
        entry in the user config's influxdb.metrics list.
        """
        # 'influxdb' section of the user-supplied config; {} when absent
        # (config['uri'] below still requires the section to exist).
        config = current_app.config['USER_CONFIG'].get('influxdb', {})
        # Fresh client per request, built from the configured DSN string.
        client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
        results = []
        for metric in config.get('metrics', []):
            collection = []
            for index, query in enumerate(metric.get('queries', [])):
                # epoch='ms' asks InfluxDB for millisecond timestamps.
                result = client.query(query, epoch='ms')
                if result:
                    for dataset in result.raw['series']:
                        series = {}
                        series['data'] = dataset['values']
                        # 'labels'/'colors' are per-query lists; 'mode' is a
                        # single value shared by all queries of this metric.
                        series['label'] = metric['labels'][index] if 'labels' in metric else None
                        series['lines'] = dict(fill=True)
                        series['mode'] = metric['mode'] if 'mode' in metric else None
                        series['color'] = metric['colors'][index] if 'colors' in metric else None
                        collection.append(series)
            results.append(collection)
        return jsonify(results=results)
| #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
    """Flask API resource serving configured InfluxDB queries as chart JSON."""
    endpoint = 'influxdb'
    url_prefix = '/influxdb'
    url_rules = {
        'index': {
            'rule': '/',
        }
    }
    def get(self):
        """Run every configured metric query and return the series data.

        Response shape: {"results": [[series, ...], ...]}, one inner list per
        entry in the user config's influxdb.metrics list.
        """
        # 'influxdb' section of the user-supplied config; {} when absent
        # (config['uri'] below still requires the section to exist).
        config = current_app.config['USER_CONFIG'].get('influxdb', {})
        # Fresh client per request, built from the configured DSN string.
        client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
        results = []
        for metric in config.get('metrics', []):
            collection = []
            for index, query in enumerate(metric.get('queries', [])):
                # epoch='ms' asks InfluxDB for millisecond timestamps.
                result = client.query(query, epoch='ms')
                if result:
                    for dataset in result.raw['series']:
                        series = {}
                        series['data'] = dataset['values']
                        # 'labels'/'colors' are per-query lists; 'mode' is a
                        # single value shared by all queries of this metric.
                        series['label'] = metric['labels'][index] if 'labels' in metric else None
                        series['lines'] = dict(fill=True)
                        series['mode'] = metric['mode'] if 'mode' in metric else None
                        series['color'] = metric['colors'][index] if 'colors' in metric else None
                        collection.append(series)
            results.append(collection)
        return jsonify(results=results)
Stop watching movies while editing code!#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
    """Flask API resource serving configured InfluxDB queries as chart JSON."""
    endpoint = 'influxdb'
    url_prefix = '/influxdb'
    url_rules = {
        'index': {
            'rule': '/',
        }
    }
    def get(self):
        """Run every configured metric query and return the series data.

        Response shape: {"results": [[series, ...], ...]}, one inner list per
        entry in the user config's influxdb.metrics list.
        """
        # 'influxdb' section of the user-supplied config; {} when absent
        # (config['uri'] below still requires the section to exist).
        config = current_app.config['USER_CONFIG'].get('influxdb', {})
        # Fresh client per request, built from the configured DSN string.
        client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
        results = []
        for metric in config.get('metrics', []):
            collection = []
            for index, query in enumerate(metric.get('queries', [])):
                # epoch='ms' asks InfluxDB for millisecond timestamps.
                result = client.query(query, epoch='ms')
                if result:
                    for dataset in result.raw['series']:
                        series = {}
                        series['data'] = dataset['values']
                        # 'labels'/'colors' are per-query lists; 'mode' is a
                        # single value shared by all queries of this metric.
                        series['label'] = metric['labels'][index] if 'labels' in metric else None
                        series['lines'] = dict(fill=True)
                        series['mode'] = metric['mode'] if 'mode' in metric else None
                        series['color'] = metric['colors'][index] if 'colors' in metric else None
                        collection.append(series)
            results.append(collection)
        return jsonify(results=results)
| <commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
    """Flask API resource serving configured InfluxDB queries as chart JSON."""
    endpoint = 'influxdb'
    url_prefix = '/influxdb'
    url_rules = {
        'index': {
            'rule': '/',
        }
    }
    def get(self):
        """Run every configured metric query and return the series data.

        Response shape: {"results": [[series, ...], ...]}, one inner list per
        entry in the user config's influxdb.metrics list.
        """
        # 'influxdb' section of the user-supplied config; {} when absent
        # (config['uri'] below still requires the section to exist).
        config = current_app.config['USER_CONFIG'].get('influxdb', {})
        # Fresh client per request, built from the configured DSN string.
        client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
        results = []
        for metric in config.get('metrics', []):
            collection = []
            for index, query in enumerate(metric.get('queries', [])):
                # epoch='ms' asks InfluxDB for millisecond timestamps.
                result = client.query(query, epoch='ms')
                if result:
                    for dataset in result.raw['series']:
                        series = {}
                        series['data'] = dataset['values']
                        # 'labels'/'colors' are per-query lists; 'mode' is a
                        # single value shared by all queries of this metric.
                        series['label'] = metric['labels'][index] if 'labels' in metric else None
                        series['lines'] = dict(fill=True)
                        series['mode'] = metric['mode'] if 'mode' in metric else None
                        series['color'] = metric['colors'][index] if 'colors' in metric else None
                        collection.append(series)
            results.append(collection)
        return jsonify(results=results)
<commit_msg>Stop watching movies while editing code!<commit_after>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import jsonify
from flask import current_app
from influxdb import InfluxDBClient
from app.base import ApiResource
class InfluxResource(ApiResource):
    """Flask API resource serving configured InfluxDB queries as chart JSON."""
    endpoint = 'influxdb'
    url_prefix = '/influxdb'
    url_rules = {
        'index': {
            'rule': '/',
        }
    }
    def get(self):
        """Run every configured metric query and return the series data.

        Response shape: {"results": [[series, ...], ...]}, one inner list per
        entry in the user config's influxdb.metrics list.
        """
        # 'influxdb' section of the user-supplied config; {} when absent
        # (config['uri'] below still requires the section to exist).
        config = current_app.config['USER_CONFIG'].get('influxdb', {})
        # Fresh client per request, built from the configured DSN string.
        client = InfluxDBClient.from_DSN(config['uri'], timeout=5)
        results = []
        for metric in config.get('metrics', []):
            collection = []
            for index, query in enumerate(metric.get('queries', [])):
                # epoch='ms' asks InfluxDB for millisecond timestamps.
                result = client.query(query, epoch='ms')
                if result:
                    for dataset in result.raw['series']:
                        series = {}
                        series['data'] = dataset['values']
                        # 'labels'/'colors' are per-query lists; 'mode' is a
                        # single value shared by all queries of this metric.
                        series['label'] = metric['labels'][index] if 'labels' in metric else None
                        series['lines'] = dict(fill=True)
                        series['mode'] = metric['mode'] if 'mode' in metric else None
                        series['color'] = metric['colors'][index] if 'colors' in metric else None
                        collection.append(series)
            results.append(collection)
        return jsonify(results=results)
|
165d76e68492070060a7045e08a7bec09a226093 | api/base/exceptions.py | api/base/exceptions.py |
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """
    # Deferred import: avoids errors when the OSF is loaded without Django.
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)
    # Title removed to avoid clash with node "title" errors
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
    errors = []
    if response is not None:
        message = response.data
        if isinstance(message, dict):
            # Whitelisted error-object members pass through unchanged; any
            # other key is wrapped under 'detail' so names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    errors.append({key: value})
                else:
                    errors.append({'detail': {key: value}})
        elif isinstance(message, list):
            for error in message:
                errors.append({'detail': error})
        else:
            errors.append({'detail': message})
        response.data = {'errors': errors}
    return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """Rewrap a DRF error response as a JSON-API style ``errors`` array."""
    # Import inside the function so the OSF can be loaded without Django.
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)

    # 'title' is deliberately absent: it would clash with node "title" errors.
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']

    if response is not None:
        message = response.data
        errors = []
        if isinstance(message, dict):
            # Whitelisted members pass through; everything else is wrapped
            # under 'detail' so field names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    entry = {key: value}
                else:
                    entry = {'detail': {key: value}}
                errors.append(entry)
        elif isinstance(message, list):
            errors = [{'detail': item} for item in message]
        else:
            errors = [{'detail': message}]
        response.data = {'errors': errors}
    return response
# Custom exceptions the Django Rest Framework does not support.
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
| Add comment to explain import inside method | Add comment to explain import inside method
| Python | apache-2.0 | arpitar/osf.io,mluke93/osf.io,icereval/osf.io,DanielSBrown/osf.io,MerlinZhang/osf.io,pattisdr/osf.io,cslzchen/osf.io,ckc6cz/osf.io,ckc6cz/osf.io,sbt9uc/osf.io,RomanZWang/osf.io,sloria/osf.io,jnayak1/osf.io,petermalcolm/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,alexschiller/osf.io,Ghalko/osf.io,alexschiller/osf.io,chennan47/osf.io,mluo613/osf.io,Ghalko/osf.io,caseyrygt/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,MerlinZhang/osf.io,doublebits/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,ZobairAlijan/osf.io,erinspace/osf.io,samanehsan/osf.io,kwierman/osf.io,RomanZWang/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,adlius/osf.io,cslzchen/osf.io,emetsger/osf.io,ticklemepierce/osf.io,saradbowman/osf.io,caseyrollins/osf.io,baylee-d/osf.io,emetsger/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,danielneis/osf.io,samanehsan/osf.io,cwisecarver/osf.io,mfraezz/osf.io,KAsante95/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,ticklemepierce/osf.io,wearpants/osf.io,binoculars/osf.io,abought/osf.io,baylee-d/osf.io,hmoco/osf.io,DanielSBrown/osf.io,danielneis/osf.io,TomBaxter/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,sbt9uc/osf.io,saradbowman/osf.io,caseyrygt/osf.io,brianjgeiger/osf.io,acshi/osf.io,samanehsan/osf.io,cosenal/osf.io,danielneis/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,ZobairAlijan/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,abought/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,aaxelb/osf.io,Nesiehr/osf.io,chrisseto/osf.io,aaxelb/osf.io,SSJohns/osf.io,crcresearch/osf.io,wearpants/osf.io,jmcarp/osf.io,mluke93/osf.io,mfraezz/osf.io,haoyuchen1992/osf.io,GageGaskins/osf.io,mattclark/osf.io,mluo613/osf.io,Johnetordoff/osf.io,kwierman/osf.io,icereval/osf.io,rdhyee/osf.io,TomHeatwole/osf.io,binoculars/osf.io,mluo613/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,arpi
tar/osf.io,HalcyonChimera/osf.io,jmcarp/osf.io,leb2dg/osf.io,billyhunt/osf.io,zamattiac/osf.io,cosenal/osf.io,cslzchen/osf.io,acshi/osf.io,felliott/osf.io,laurenrevere/osf.io,jmcarp/osf.io,ckc6cz/osf.io,caseyrygt/osf.io,GageGaskins/osf.io,ticklemepierce/osf.io,samchrisinger/osf.io,asanfilippo7/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,ZobairAlijan/osf.io,acshi/osf.io,samanehsan/osf.io,acshi/osf.io,doublebits/osf.io,alexschiller/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,adlius/osf.io,emetsger/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,chrisseto/osf.io,chennan47/osf.io,ckc6cz/osf.io,danielneis/osf.io,chennan47/osf.io,zamattiac/osf.io,samchrisinger/osf.io,kch8qx/osf.io,njantrania/osf.io,jnayak1/osf.io,cwisecarver/osf.io,KAsante95/osf.io,adlius/osf.io,cslzchen/osf.io,acshi/osf.io,arpitar/osf.io,adlius/osf.io,ZobairAlijan/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,zachjanicki/osf.io,TomHeatwole/osf.io,MerlinZhang/osf.io,rdhyee/osf.io,cwisecarver/osf.io,mluo613/osf.io,alexschiller/osf.io,TomBaxter/osf.io,njantrania/osf.io,zachjanicki/osf.io,njantrania/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,hmoco/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,mattclark/osf.io,erinspace/osf.io,aaxelb/osf.io,mluke93/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,GageGaskins/osf.io,wearpants/osf.io,GageGaskins/osf.io,erinspace/osf.io,KAsante95/osf.io,sloria/osf.io,zamattiac/osf.io,arpitar/osf.io,caseyrygt/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,haoyuchen1992/osf.io,jmcarp/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,hmoco/osf.io,caseyrollins/osf.io,amyshi188/osf.io,leb2dg/osf.io,njantrania/osf.io,mluo613/osf.io,TomBaxter/osf.io,kwierman/osf.io,rdhyee/osf.io,KAsante95/osf.io,SSJohns/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,caneruguz/osf.io,kch8qx/osf.io,Nesiehr/osf.io,caneruguz/osf.io,cosenal/osf.io,abought/osf.io,cosenal/osf.
io,billyhunt/osf.io,wearpants/osf.io,brianjgeiger/osf.io,binoculars/osf.io,RomanZWang/osf.io,crcresearch/osf.io,GageGaskins/osf.io,sbt9uc/osf.io,doublebits/osf.io,felliott/osf.io,brandonPurvis/osf.io,doublebits/osf.io,felliott/osf.io,pattisdr/osf.io,baylee-d/osf.io,rdhyee/osf.io,icereval/osf.io,emetsger/osf.io,haoyuchen1992/osf.io,asanfilippo7/osf.io,KAsante95/osf.io,chrisseto/osf.io,doublebits/osf.io,RomanZWang/osf.io,billyhunt/osf.io,petermalcolm/osf.io,amyshi188/osf.io,laurenrevere/osf.io,mfraezz/osf.io,samchrisinger/osf.io,sloria/osf.io,kwierman/osf.io,jnayak1/osf.io,mluke93/osf.io,crcresearch/osf.io,caneruguz/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,mattclark/osf.io,sbt9uc/osf.io,mfraezz/osf.io,Ghalko/osf.io,haoyuchen1992/osf.io,abought/osf.io |
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """
    # Deferred import: avoids errors when the OSF is loaded without Django.
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)
    # Title removed to avoid clash with node "title" errors
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
    errors = []
    if response is not None:
        message = response.data
        if isinstance(message, dict):
            # Whitelisted error-object members pass through unchanged; any
            # other key is wrapped under 'detail' so names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    errors.append({key: value})
                else:
                    errors.append({'detail': {key: value}})
        elif isinstance(message, list):
            for error in message:
                errors.append({'detail': error})
        else:
            errors.append({'detail': message})
        response.data = {'errors': errors}
    return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
Add comment to explain import inside method |
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """
    # Import inside method to avoid errors when the OSF is loaded without Django
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)
    # Title removed to avoid clash with node "title" errors
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
    errors = []
    if response is not None:
        message = response.data
        if isinstance(message, dict):
            # Whitelisted error-object members pass through unchanged; any
            # other key is wrapped under 'detail' so names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    errors.append({key: value})
                else:
                    errors.append({'detail': {key: value}})
        elif isinstance(message, list):
            for error in message:
                errors.append({'detail': error})
        else:
            errors.append({'detail': message})
        response.data = {'errors': errors}
    return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
| <commit_before>
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """
    # Deferred import: avoids errors when the OSF is loaded without Django.
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)
    # Title removed to avoid clash with node "title" errors
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
    errors = []
    if response is not None:
        message = response.data
        if isinstance(message, dict):
            # Whitelisted error-object members pass through unchanged; any
            # other key is wrapped under 'detail' so names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    errors.append({key: value})
                else:
                    errors.append({'detail': {key: value}})
        elif isinstance(message, list):
            for error in message:
                errors.append({'detail': error})
        else:
            errors.append({'detail': message})
        response.data = {'errors': errors}
    return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
<commit_msg>Add comment to explain import inside method<commit_after> |
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """
    # Import inside method to avoid errors when the OSF is loaded without Django
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)
    # Title removed to avoid clash with node "title" errors
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
    errors = []
    if response is not None:
        message = response.data
        if isinstance(message, dict):
            # Whitelisted error-object members pass through unchanged; any
            # other key is wrapped under 'detail' so names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    errors.append({key: value})
                else:
                    errors.append({'detail': {key: value}})
        elif isinstance(message, list):
            for error in message:
                errors.append({'detail': error})
        else:
            errors.append({'detail': message})
        response.data = {'errors': errors}
    return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """
    # Deferred import: avoids errors when the OSF is loaded without Django.
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)
    # Title removed to avoid clash with node "title" errors
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
    errors = []
    if response is not None:
        message = response.data
        if isinstance(message, dict):
            # Whitelisted error-object members pass through unchanged; any
            # other key is wrapped under 'detail' so names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    errors.append({key: value})
                else:
                    errors.append({'detail': {key: value}})
        elif isinstance(message, list):
            for error in message:
                errors.append({'detail': error})
        else:
            errors.append({'detail': message})
        response.data = {'errors': errors}
    return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
Add comment to explain import inside method
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """
    # Import inside method to avoid errors when the OSF is loaded without Django
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)
    # Title removed to avoid clash with node "title" errors
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
    errors = []
    if response is not None:
        message = response.data
        if isinstance(message, dict):
            # Whitelisted error-object members pass through unchanged; any
            # other key is wrapped under 'detail' so names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    errors.append({key: value})
                else:
                    errors.append({'detail': {key: value}})
        elif isinstance(message, list):
            for error in message:
                errors.append({'detail': error})
        else:
            errors.append({'detail': message})
        response.data = {'errors': errors}
    return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
| <commit_before>
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """
    # Deferred import: avoids errors when the OSF is loaded without Django.
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)
    # Title removed to avoid clash with node "title" errors
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
    errors = []
    if response is not None:
        message = response.data
        if isinstance(message, dict):
            # Whitelisted error-object members pass through unchanged; any
            # other key is wrapped under 'detail' so names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    errors.append({key: value})
                else:
                    errors.append({'detail': {key: value}})
        elif isinstance(message, list):
            for error in message:
                errors.append({'detail': error})
        else:
            errors.append({'detail': message})
        response.data = {'errors': errors}
    return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
<commit_msg>Add comment to explain import inside method<commit_after>
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """
    # Import inside method to avoid errors when the OSF is loaded without Django
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)
    # Title removed to avoid clash with node "title" errors
    acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
    errors = []
    if response is not None:
        message = response.data
        if isinstance(message, dict):
            # Whitelisted error-object members pass through unchanged; any
            # other key is wrapped under 'detail' so names cannot collide.
            for key, value in message.iteritems():
                if key in acceptable_members:
                    errors.append({key: value})
                else:
                    errors.append({'detail': {key: value}})
        elif isinstance(message, list):
            for error in message:
                errors.append({'detail': error})
        else:
            errors.append({'detail': message})
        response.data = {'errors': errors}
    return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
    """HTTP 410 Gone: the requested resource is no longer available."""
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')
|
2a925111aa0cd114b30e94c6a8d7d96d46f6d3d8 | appengine_config.py | appengine_config.py | """Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
    """Wrap the webapp WSGI application with the Appstats recorder."""
    return recording.appstats_wsgi_middleware(app)

# Collapse request paths into a small set of buckets for Appstats grouping.
def appstats_normalize_path(path):
    """Return a normalized form of *path* with identifiers replaced by 'X'."""
    for prefix in ('/user/', '/user_popup/'):
        if path.startswith(prefix):
            return prefix + 'X'
    for fragment, bucket in (('/diff/', '/X/diff/...'),
                             ('/diff2/', '/X/diff2/...'),
                             ('/patch/', '/X/patch/...')):
        if fragment in path:
            return bucket
    if path.startswith('/rss/'):
        cut = path.find('/', 5)  # first '/' after the '/rss/' prefix
        if cut > 0:
            return path[:cut] + '/X'
    # Fallback: collapse every run of digits.
    return re.sub(r'\d+', 'X', path)

# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Django 1.2+ requires DJANGO_SETTINGS_MODULE in the environment:
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
| """Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
    """Wrap the webapp WSGI application with the Appstats recorder."""
    return recording.appstats_wsgi_middleware(app)

# Collapse request paths into a small set of buckets for Appstats grouping.
def appstats_normalize_path(path):
    """Return a normalized form of *path* with identifiers replaced by 'X'."""
    for prefix in ('/user/', '/user_popup/'):
        if path.startswith(prefix):
            return prefix + 'X'
    for fragment, bucket in (('/diff/', '/X/diff/...'),
                             ('/diff2/', '/X/diff2/...'),
                             ('/patch/', '/X/patch/...')):
        if fragment in path:
            return bucket
    if path.startswith('/rss/'):
        cut = path.find('/', 5)  # first '/' after the '/rss/' prefix
        if cut > 0:
            return path[:cut] + '/X'
    # Fallback: collapse every run of digits.
    return re.sub(r'\d+', 'X', path)

# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Allow the Appstats interactive playground (shell).
appstats_SHELL_OK = True
# Django 1.2+ requires DJANGO_SETTINGS_MODULE in the environment:
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
| Enable the Appstats Interactive Playground. | Enable the Appstats Interactive Playground.
| Python | apache-2.0 | riannucci/rietveldv2,riannucci/rietveldv2 | """Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
    """Wrap the webapp WSGI app so Appstats records every request."""
    app = recording.appstats_wsgi_middleware(app)
    return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
    """Collapse *path* into a coarse bucket for Appstats grouping
    (e.g. '/user/alice' -> '/user/X').
    """
    if path.startswith('/user/'):
        return '/user/X'
    if path.startswith('/user_popup/'):
        return '/user_popup/X'
    if '/diff/' in path:
        return '/X/diff/...'
    if '/diff2/' in path:
        return '/X/diff2/...'
    if '/patch/' in path:
        return '/X/patch/...'
    if path.startswith('/rss/'):
        # First '/' after the '/rss/' prefix (len('/rss/') == 5).
        i = path.find('/', 5)
        if i > 0:
            return path[:i] + '/X'
    # Fallback: collapse every run of digits.
    return re.sub(r'\d+', 'X', path)
# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
Enable the Appstats Interactive Playground. | """Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
    """Wrap the webapp WSGI app so Appstats records every request."""
    app = recording.appstats_wsgi_middleware(app)
    return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
    """Collapse *path* into a coarse bucket for Appstats grouping
    (e.g. '/user/alice' -> '/user/X').
    """
    if path.startswith('/user/'):
        return '/user/X'
    if path.startswith('/user_popup/'):
        return '/user_popup/X'
    if '/diff/' in path:
        return '/X/diff/...'
    if '/diff2/' in path:
        return '/X/diff2/...'
    if '/patch/' in path:
        return '/X/patch/...'
    if path.startswith('/rss/'):
        # First '/' after the '/rss/' prefix (len('/rss/') == 5).
        i = path.find('/', 5)
        if i > 0:
            return path[:i] + '/X'
    # Fallback: collapse every run of digits.
    return re.sub(r'\d+', 'X', path)
# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Enable Interactive Playground.
appstats_SHELL_OK = True
# Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
| <commit_before>"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
    """Wrap the webapp WSGI app so Appstats records every request."""
    app = recording.appstats_wsgi_middleware(app)
    return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
    """Collapse *path* into a coarse bucket for Appstats grouping
    (e.g. '/user/alice' -> '/user/X').
    """
    if path.startswith('/user/'):
        return '/user/X'
    if path.startswith('/user_popup/'):
        return '/user_popup/X'
    if '/diff/' in path:
        return '/X/diff/...'
    if '/diff2/' in path:
        return '/X/diff2/...'
    if '/patch/' in path:
        return '/X/patch/...'
    if path.startswith('/rss/'):
        # First '/' after the '/rss/' prefix (len('/rss/') == 5).
        i = path.find('/', 5)
        if i > 0:
            return path[:i] + '/X'
    # Fallback: collapse every run of digits.
    return re.sub(r'\d+', 'X', path)
# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
<commit_msg>Enable the Appstats Interactive Playground.<commit_after> | """Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
    """Wrap the webapp WSGI app so Appstats records every request."""
    app = recording.appstats_wsgi_middleware(app)
    return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
    """Collapse *path* into a coarse bucket for Appstats grouping
    (e.g. '/user/alice' -> '/user/X').
    """
    if path.startswith('/user/'):
        return '/user/X'
    if path.startswith('/user_popup/'):
        return '/user_popup/X'
    if '/diff/' in path:
        return '/X/diff/...'
    if '/diff2/' in path:
        return '/X/diff2/...'
    if '/patch/' in path:
        return '/X/patch/...'
    if path.startswith('/rss/'):
        # First '/' after the '/rss/' prefix (len('/rss/') == 5).
        i = path.find('/', 5)
        if i > 0:
            return path[:i] + '/X'
    # Fallback: collapse every run of digits.
    return re.sub(r'\d+', 'X', path)
# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Enable Interactive Playground.
appstats_SHELL_OK = True
# Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
| """Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
    """Wrap the webapp WSGI app so Appstats records every request."""
    app = recording.appstats_wsgi_middleware(app)
    return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
    """Collapse *path* into a coarse bucket for Appstats grouping
    (e.g. '/user/alice' -> '/user/X').
    """
    if path.startswith('/user/'):
        return '/user/X'
    if path.startswith('/user_popup/'):
        return '/user_popup/X'
    if '/diff/' in path:
        return '/X/diff/...'
    if '/diff2/' in path:
        return '/X/diff2/...'
    if '/patch/' in path:
        return '/X/patch/...'
    if path.startswith('/rss/'):
        # First '/' after the '/rss/' prefix (len('/rss/') == 5).
        i = path.find('/', 5)
        if i > 0:
            return path[:i] + '/X'
    # Fallback: collapse every run of digits.
    return re.sub(r'\d+', 'X', path)
# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
Enable the Appstats Interactive Playground."""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
    """Wrap the webapp WSGI app so Appstats records every request."""
    app = recording.appstats_wsgi_middleware(app)
    return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
    """Collapse *path* into a coarse bucket for Appstats grouping
    (e.g. '/user/alice' -> '/user/X').
    """
    if path.startswith('/user/'):
        return '/user/X'
    if path.startswith('/user_popup/'):
        return '/user_popup/X'
    if '/diff/' in path:
        return '/X/diff/...'
    if '/diff2/' in path:
        return '/X/diff2/...'
    if '/patch/' in path:
        return '/X/patch/...'
    if path.startswith('/rss/'):
        # First '/' after the '/rss/' prefix (len('/rss/') == 5).
        i = path.find('/', 5)
        if i > 0:
            return path[:i] + '/X'
    # Fallback: collapse every run of digits.
    return re.sub(r'\d+', 'X', path)
# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Enable Interactive Playground.
appstats_SHELL_OK = True
# Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
| <commit_before>"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if '/diff/' in path:
return '/X/diff/...'
if '/diff2/' in path:
return '/X/diff2/...'
if '/patch/' in path:
return '/X/patch/...'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
<commit_msg>Enable the Appstats Interactive Playground.<commit_after>"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if '/diff/' in path:
return '/X/diff/...'
if '/diff2/' in path:
return '/X/diff2/...'
if '/patch/' in path:
return '/X/patch/...'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Segregate Appstats by runtime (python vs. python27).
appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME')
# Enable Interactive Playground.
appstats_SHELL_OK = True
# Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set
# http://code.google.com/appengine/docs/python/tools/libraries.html#Django
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
# NOTE: All "main" scripts must import webapp.template before django.
|
df2fe66f64f79127374d2f183cb76966f77761ee | signac/common/errors.py | signac/common/errors.py | # Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
pass
| # Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
def __str__(self):
if len(self.args) > 0:
return "Failed to authenticate with host '{}'.".format(self.args[0])
else:
return "Failed to authenticate with host."
| Improve error message for authentication issues. | Improve error message for authentication issues.
| Python | bsd-3-clause | csadorf/signac,csadorf/signac | # Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
pass
Improve error message for authentication issues. | # Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
def __str__(self):
if len(self.args) > 0:
return "Failed to authenticate with host '{}'.".format(self.args[0])
else:
return "Failed to authenticate with host."
| <commit_before># Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
pass
<commit_msg>Improve error message for authentication issues.<commit_after> | # Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
def __str__(self):
if len(self.args) > 0:
return "Failed to authenticate with host '{}'.".format(self.args[0])
else:
return "Failed to authenticate with host."
| # Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
pass
Improve error message for authentication issues.# Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
def __str__(self):
if len(self.args) > 0:
return "Failed to authenticate with host '{}'.".format(self.args[0])
else:
return "Failed to authenticate with host."
| <commit_before># Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
pass
<commit_msg>Improve error message for authentication issues.<commit_after># Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the MIT License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
def __str__(self):
if len(self.args) > 0:
return "Failed to authenticate with host '{}'.".format(self.args[0])
else:
return "Failed to authenticate with host."
|
b1adfacff3b7c319ae148e79807d371ded934a5c | tx_salaries/management/commands/generate_transformer_hash.py | tx_salaries/management/commands/generate_transformer_hash.py | from django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
print transformer.generate_key(labels)
| from django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
| Add message if transformer_hash already exists | Add message if transformer_hash already exists
| Python | apache-2.0 | texastribune/tx_salaries,texastribune/tx_salaries | from django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
print transformer.generate_key(labels)
Add message if transformer_hash already exists | from django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
| <commit_before>from django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
print transformer.generate_key(labels)
<commit_msg>Add message if transformer_hash already exists<commit_after> | from django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
| from django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
print transformer.generate_key(labels)
Add message if transformer_hash already existsfrom django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
| <commit_before>from django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
print transformer.generate_key(labels)
<commit_msg>Add message if transformer_hash already exists<commit_after>from django.core.management.base import BaseCommand
from ...utils import transformer
class Command(BaseCommand):
def handle(self, filename, *args, **kwargs):
reader = transformer.convert_to_csv_reader(filename)
labels = reader.next()
transformer_key = transformer.generate_key(labels)
if transformer_key in transformer.TRANSFORMERS.keys():
print transformer_key + ' (exists)'
else:
print transformer_key
|
e4b47c9bc3de18c83a2fb718c806b7668b492de6 | authentication/urls.py | authentication/urls.py | from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login')) | from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login',name='logout')) | Add name to logout url regex | Add name to logout url regex
| Python | mit | DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune | from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login'))Add name to logout url regex | from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login',name='logout')) | <commit_before>from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login'))<commit_msg>Add name to logout url regex<commit_after> | from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login',name='logout')) | from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login'))Add name to logout url regexfrom django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login',name='logout')) | <commit_before>from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login'))<commit_msg>Add name to logout url regex<commit_after>from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login',name='logout')) |
caa2b1e9303d2ba67a0491942d2459006ea8efe3 | bucky/__init__.py | bucky/__init__.py | from flask import Flask
from flask_login import LoginManager
from config import config
database = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
| from flask import Flask
from flask_login import LoginManager
from config import config
database = {}
current_user = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.current_user = current_user
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
| Add current_user to app properties | Add current_user to app properties
| Python | mit | JoshuaOndieki/buckylist,JoshuaOndieki/buckylist | from flask import Flask
from flask_login import LoginManager
from config import config
database = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
Add current_user to app properties | from flask import Flask
from flask_login import LoginManager
from config import config
database = {}
current_user = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.current_user = current_user
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
| <commit_before>from flask import Flask
from flask_login import LoginManager
from config import config
database = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
<commit_msg>Add current_user to app properties<commit_after> | from flask import Flask
from flask_login import LoginManager
from config import config
database = {}
current_user = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.current_user = current_user
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
| from flask import Flask
from flask_login import LoginManager
from config import config
database = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
Add current_user to app propertiesfrom flask import Flask
from flask_login import LoginManager
from config import config
database = {}
current_user = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.current_user = current_user
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
| <commit_before>from flask import Flask
from flask_login import LoginManager
from config import config
database = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
<commit_msg>Add current_user to app properties<commit_after>from flask import Flask
from flask_login import LoginManager
from config import config
database = {}
current_user = {}
login_manager = LoginManager()
def create_app(config_name):
app = Flask(__name__)
app.database = database
app.current_user = current_user
app.config.from_object(config[config_name])
login_manager.init_app(app)
@login_manager.user_loader
def load_user(username):
for user in app.database:
if user.username == username:
return user
from . import views
app.register_blueprint(views.views)
return app
|
b9b194b9eb9a9ddefa9549a522fc67c181acbc4a | tests/test_importlazy.py | tests/test_importlazy.py | """
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
assert isinstance(one, firstpkg.path.ClassOne)
| """
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
from firstpkg.a_module import AModuleClassOne
assert isinstance(one, AModuleClassOne)
| Check the instance against the original class declaration. | Check the instance against the original class declaration.
| Python | mit | ldiary/importlazy | """
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
assert isinstance(one, firstpkg.path.ClassOne)
Check the instance against the original class declaration. | """
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
from firstpkg.a_module import AModuleClassOne
assert isinstance(one, AModuleClassOne)
| <commit_before>"""
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
assert isinstance(one, firstpkg.path.ClassOne)
<commit_msg>Check the instance against the original class declaration.<commit_after> | """
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
from firstpkg.a_module import AModuleClassOne
assert isinstance(one, AModuleClassOne)
| """
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
assert isinstance(one, firstpkg.path.ClassOne)
Check the instance against the original class declaration."""
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
from firstpkg.a_module import AModuleClassOne
assert isinstance(one, AModuleClassOne)
| <commit_before>"""
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
assert isinstance(one, firstpkg.path.ClassOne)
<commit_msg>Check the instance against the original class declaration.<commit_after>"""
To run this test:
1. cd importlazy/tests
2. python -m unittest test_importlazy.py
"""
import unittest
import sys
class ImportLazyTest(unittest.TestCase):
def test1_initial_state(self):
"""Initially, the module must not exist in the imported modules.
The package or module should not be imported during the initial state.
"""
assert 'test_importlazy' in sys.modules
assert 'firstpkg' not in sys.modules
assert 'firstpkg.path' not in sys.modules
assert 'a_module' not in sys.modules
def test2_importing_module_only_when_used(self):
"""The package must exist in the imported modules.
Followed by the module once the attribute of it is used.
"""
import firstpkg
# The package will be imported but not the modules
assert 'firstpkg' in sys.modules
assert 'firstpkg.path' in sys.modules
assert 'firstpkg.a_module' not in sys.modules
# As the class attribute of the module is used,
# the module will be imported for the first time
assert str(type(firstpkg.path.ClassOne)) == "<class 'type'>"
assert 'firstpkg.a_module' in sys.modules
# Check that we can really create an instance of the imported class
one = firstpkg.path.ClassOne()
from firstpkg.a_module import AModuleClassOne
assert isinstance(one, AModuleClassOne)
|
0f16fe34654560f8889ad1f5b199cb8bfa2b3846 | tests/web/test_status.py | tests/web/test_status.py | """
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
    """Exercise GET and HEAD on the /status endpoint."""

    def test_01_get(self):
        """GET /status answers with the literal body 'OK'."""
        body = self.request('/status').text
        assert body == 'OK'

    def test_02_head(self):
        """HEAD /status must simply succeed; there is no body to verify."""
        self.request('/status', method='HEAD')
| """
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
    """Exercise GET and HEAD on the /status endpoint."""

    def test_01_get(self):
        """GET /status returns a JSON document shaped like::

            {
                "code": 200,
                "status": "yellow",
                "payload": {
                    "id": "1017",
                    "index": "bts_test",
                    "doc_type": "_all"
                },
                "response": {
                    "_index": "bts_test",
                    "_type": "gene",
                    "_id": "1017",
                    "_version": 1,
                    "found": true,
                    "_source": { ... }
                }
            }
        """
        payload = self.request('/status').json()
        assert payload['code'] == 200
        assert payload['response']['found']

    def test_02_head(self):
        """HEAD /status must simply succeed; there is no body to verify."""
        self.request('/status', method='HEAD')
| Update test case for status handler accordingly | Update test case for status handler accordingly
| Python | apache-2.0 | biothings/biothings.api,biothings/biothings.api | """
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
def test_01_get(self):
res = self.request('/status').text
assert res == 'OK'
def test_02_head(self):
self.request('/status', method='HEAD')
Update test case for status handler accordingly | """
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
def test_01_get(self):
"""
{
"code": 200,
"status": "yellow",
"payload": {
"id": "1017",
"index": "bts_test",
"doc_type": "_all"
},
"response": {
"_index": "bts_test",
"_type": "gene",
"_id": "1017",
"_version": 1,
"found": true,
"_source": { ... }
}
}
"""
res = self.request('/status').json()
assert res['code'] == 200
assert res['response']['found']
def test_02_head(self):
self.request('/status', method='HEAD')
| <commit_before>"""
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
def test_01_get(self):
res = self.request('/status').text
assert res == 'OK'
def test_02_head(self):
self.request('/status', method='HEAD')
<commit_msg>Update test case for status handler accordingly<commit_after> | """
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
def test_01_get(self):
"""
{
"code": 200,
"status": "yellow",
"payload": {
"id": "1017",
"index": "bts_test",
"doc_type": "_all"
},
"response": {
"_index": "bts_test",
"_type": "gene",
"_id": "1017",
"_version": 1,
"found": true,
"_source": { ... }
}
}
"""
res = self.request('/status').json()
assert res['code'] == 200
assert res['response']['found']
def test_02_head(self):
self.request('/status', method='HEAD')
| """
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
def test_01_get(self):
res = self.request('/status').text
assert res == 'OK'
def test_02_head(self):
self.request('/status', method='HEAD')
Update test case for status handler accordingly"""
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
def test_01_get(self):
"""
{
"code": 200,
"status": "yellow",
"payload": {
"id": "1017",
"index": "bts_test",
"doc_type": "_all"
},
"response": {
"_index": "bts_test",
"_type": "gene",
"_id": "1017",
"_version": 1,
"found": true,
"_source": { ... }
}
}
"""
res = self.request('/status').json()
assert res['code'] == 200
assert res['response']['found']
def test_02_head(self):
self.request('/status', method='HEAD')
| <commit_before>"""
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
def test_01_get(self):
res = self.request('/status').text
assert res == 'OK'
def test_02_head(self):
self.request('/status', method='HEAD')
<commit_msg>Update test case for status handler accordingly<commit_after>"""
Test Status Endpoint
GET /status
HEAD /status
"""
from biothings.tests.web import BiothingsTestCase
from setup import setup_es # pylint: disable=unused-import
class TestStatus(BiothingsTestCase):
def test_01_get(self):
"""
{
"code": 200,
"status": "yellow",
"payload": {
"id": "1017",
"index": "bts_test",
"doc_type": "_all"
},
"response": {
"_index": "bts_test",
"_type": "gene",
"_id": "1017",
"_version": 1,
"found": true,
"_source": { ... }
}
}
"""
res = self.request('/status').json()
assert res['code'] == 200
assert res['response']['found']
def test_02_head(self):
self.request('/status', method='HEAD')
|
69d88adcedaf3779e5bf5a5757a21c71d4aa3016 | novajoin/errors.py | novajoin/errors.py | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(Exception):
    """Raised when the service configuration is invalid or incomplete.

    Derives from Exception: the Python 2-only StandardError base class
    was removed in Python 3, so inheriting from it fails there.
    """

    def __init__(self, message):
        # Delegate through the MRO rather than calling the base class's
        # __init__ directly, so cooperative subclassing keeps working.
        super(ConfigurationError, self).__init__(message)
| # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(Exception):
    """Raised when the service configuration is invalid or incomplete."""

    def __init__(self, message):
        # super() must name this class, not its base: super(Exception, self)
        # starts the MRO walk *after* Exception (i.e. at BaseException) and
        # would skip any cooperative base class's __init__.
        super(ConfigurationError, self).__init__(message)
| Use Exception instead of StandardError | Use Exception instead of StandardError
StandardError was removed in Python 3; Exception is the portable base class.
| Python | apache-2.0 | rcritten/novajoin | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(StandardError):
def __init__(self, message):
StandardError.__init__(self, message)
Use Exception instead of StandardError
StandardError was deprecated in python 3.X. | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(Exception):
def __init__(self, message):
super(Exception, self).__init__(message)
| <commit_before># Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(StandardError):
def __init__(self, message):
StandardError.__init__(self, message)
<commit_msg>Use Exception instead of StandardError
StandardError was deprecated in python 3.X.<commit_after> | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(Exception):
def __init__(self, message):
super(Exception, self).__init__(message)
| # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(StandardError):
def __init__(self, message):
StandardError.__init__(self, message)
Use Exception instead of StandardError
StandardError was deprecated in python 3.X.# Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(Exception):
def __init__(self, message):
super(Exception, self).__init__(message)
| <commit_before># Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(StandardError):
def __init__(self, message):
StandardError.__init__(self, message)
<commit_msg>Use Exception instead of StandardError
StandardError was deprecated in python 3.X.<commit_after># Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class ConfigurationError(Exception):
def __init__(self, message):
super(Exception, self).__init__(message)
|
6caa1c3962ecc7ab57dea83a5c7beef4f3c1220e | pmxbot/__init__.py | pmxbot/__init__.py | # -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = logging.INFO
"The config object"
| # -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging as _logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = _logging.INFO
"The config object"
| Fix issue with conflated pmxbot.logging | Fix issue with conflated pmxbot.logging
| Python | bsd-3-clause | jamwt/diesel-pmxbot,jamwt/diesel-pmxbot | # -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = logging.INFO
"The config object"
Fix issue with conflated pmxbot.logging | # -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging as _logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = _logging.INFO
"The config object"
| <commit_before># -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = logging.INFO
"The config object"
<commit_msg>Fix issue with conflated pmxbot.logging<commit_after> | # -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging as _logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = _logging.INFO
"The config object"
| # -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = logging.INFO
"The config object"
Fix issue with conflated pmxbot.logging# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging as _logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = _logging.INFO
"The config object"
| <commit_before># -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = logging.INFO
"The config object"
<commit_msg>Fix issue with conflated pmxbot.logging<commit_after># -*- coding: utf-8 -*-
# vim:ts=4:sw=4:noexpandtab
from __future__ import absolute_import
import socket
import logging as _logging
from .dictlib import ConfigDict
config = ConfigDict(
bot_nickname = 'pmxbot',
database = 'sqlite:pmxbot.sqlite',
server_host = 'localhost',
server_port = 6667,
use_ssl = False,
password = None,
silent_bot = False,
log_channels = [],
other_channels = [],
places = ['London', 'Tokyo', 'New York'],
feed_interval = 15, # minutes
feeds = [dict(
name = 'pmxbot bitbucket',
channel = '#inane',
linkurl = 'http://bitbucket.org/yougov/pmxbot',
url = 'http://bitbucket.org/yougov/pmxbot',
),
],
librarypaste = 'http://paste.jaraco.com',
)
config['logs URL'] = 'http://' + socket.getfqdn()
config['log level'] = _logging.INFO
"The config object"
|
0928060f4390f221d68518a9ec7b8b43b82423b2 | iatidq/dqfunctions.py | iatidq/dqfunctions.py |
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
    """Record a new status entry for the given package."""
    with db.session.begin():
        record = models.PackageStatus()
        record.package_id = package_id
        record.status = status_id
        db.session.add(record)
def clear_revisions():
    """Reset the stored revision id on every active package that has one."""
    with db.session.begin():
        stale = models.Package.query.filter(
            models.Package.package_revision_id != None,
            models.Package.active == True
        ).all()
        for package in stale:
            package.package_revision_id = None
            db.session.add(package)
def packages_from_registry(registry_url):
    """Generator over all package records from a paged registry endpoint.

    ``registry_url`` must contain one ``%``-style placeholder for the
    offset; pages are fetched 1000 records apart until an empty
    ``results`` array comes back.
    """
    offset = 0
    while True:
        page = urllib2.urlopen(registry_url % (offset), timeout=60).read()
        print (registry_url % (offset))
        records = json.loads(page)["results"]
        if not records:
            return
        for record in records:
            yield record
        offset += 1000
|
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
with db.session.begin():
pstatus = models.PackageStatus()
pstatus.package_id = package_id
pstatus.status = status_id
db.session.add(pstatus)
def clear_revisions():
with db.session.begin():
for pkg in models.Package.query.filter(
models.Package.package_revision_id!=None,
models.Package.active == True
).all():
pkg.package_revision_id = None
db.session.add(pkg)
def packages_from_registry(registry_url):
#offset = 0
#while True:
#data = urllib2.urlopen(registry_url % (offset), timeout=60).read()
#print (registry_url % (offset))
data = urllib2.urlopen(registry_url, timeout=60).read()
print registry_url
data = json.loads(data)
#if len(data["results"]) < 1:
# break
for pkg in data["data"]:
yield pkg
#offset += 1000
| Adjust for new IATI Updates API; don't have to page through results | Adjust for new IATI Updates API; don't have to page through results
| Python | agpl-3.0 | pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality |
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
with db.session.begin():
pstatus = models.PackageStatus()
pstatus.package_id = package_id
pstatus.status = status_id
db.session.add(pstatus)
def clear_revisions():
with db.session.begin():
for pkg in models.Package.query.filter(
models.Package.package_revision_id!=None,
models.Package.active == True
).all():
pkg.package_revision_id = None
db.session.add(pkg)
def packages_from_registry(registry_url):
offset = 0
while True:
data = urllib2.urlopen(registry_url % (offset), timeout=60).read()
print (registry_url % (offset))
data = json.loads(data)
if len(data["results"]) < 1:
break
for pkg in data["results"]:
yield pkg
offset += 1000
Adjust for new IATI Updates API; don't have to page through results |
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
with db.session.begin():
pstatus = models.PackageStatus()
pstatus.package_id = package_id
pstatus.status = status_id
db.session.add(pstatus)
def clear_revisions():
with db.session.begin():
for pkg in models.Package.query.filter(
models.Package.package_revision_id!=None,
models.Package.active == True
).all():
pkg.package_revision_id = None
db.session.add(pkg)
def packages_from_registry(registry_url):
#offset = 0
#while True:
#data = urllib2.urlopen(registry_url % (offset), timeout=60).read()
#print (registry_url % (offset))
data = urllib2.urlopen(registry_url, timeout=60).read()
print registry_url
data = json.loads(data)
#if len(data["results"]) < 1:
# break
for pkg in data["data"]:
yield pkg
#offset += 1000
| <commit_before>
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
with db.session.begin():
pstatus = models.PackageStatus()
pstatus.package_id = package_id
pstatus.status = status_id
db.session.add(pstatus)
def clear_revisions():
with db.session.begin():
for pkg in models.Package.query.filter(
models.Package.package_revision_id!=None,
models.Package.active == True
).all():
pkg.package_revision_id = None
db.session.add(pkg)
def packages_from_registry(registry_url):
offset = 0
while True:
data = urllib2.urlopen(registry_url % (offset), timeout=60).read()
print (registry_url % (offset))
data = json.loads(data)
if len(data["results"]) < 1:
break
for pkg in data["results"]:
yield pkg
offset += 1000
<commit_msg>Adjust for new IATI Updates API; don't have to page through results<commit_after> |
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
with db.session.begin():
pstatus = models.PackageStatus()
pstatus.package_id = package_id
pstatus.status = status_id
db.session.add(pstatus)
def clear_revisions():
with db.session.begin():
for pkg in models.Package.query.filter(
models.Package.package_revision_id!=None,
models.Package.active == True
).all():
pkg.package_revision_id = None
db.session.add(pkg)
def packages_from_registry(registry_url):
#offset = 0
#while True:
#data = urllib2.urlopen(registry_url % (offset), timeout=60).read()
#print (registry_url % (offset))
data = urllib2.urlopen(registry_url, timeout=60).read()
print registry_url
data = json.loads(data)
#if len(data["results"]) < 1:
# break
for pkg in data["data"]:
yield pkg
#offset += 1000
|
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
with db.session.begin():
pstatus = models.PackageStatus()
pstatus.package_id = package_id
pstatus.status = status_id
db.session.add(pstatus)
def clear_revisions():
with db.session.begin():
for pkg in models.Package.query.filter(
models.Package.package_revision_id!=None,
models.Package.active == True
).all():
pkg.package_revision_id = None
db.session.add(pkg)
def packages_from_registry(registry_url):
offset = 0
while True:
data = urllib2.urlopen(registry_url % (offset), timeout=60).read()
print (registry_url % (offset))
data = json.loads(data)
if len(data["results"]) < 1:
break
for pkg in data["results"]:
yield pkg
offset += 1000
Adjust for new IATI Updates API; don't have to page through results
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
with db.session.begin():
pstatus = models.PackageStatus()
pstatus.package_id = package_id
pstatus.status = status_id
db.session.add(pstatus)
def clear_revisions():
with db.session.begin():
for pkg in models.Package.query.filter(
models.Package.package_revision_id!=None,
models.Package.active == True
).all():
pkg.package_revision_id = None
db.session.add(pkg)
def packages_from_registry(registry_url):
#offset = 0
#while True:
#data = urllib2.urlopen(registry_url % (offset), timeout=60).read()
#print (registry_url % (offset))
data = urllib2.urlopen(registry_url, timeout=60).read()
print registry_url
data = json.loads(data)
#if len(data["results"]) < 1:
# break
for pkg in data["data"]:
yield pkg
#offset += 1000
| <commit_before>
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
with db.session.begin():
pstatus = models.PackageStatus()
pstatus.package_id = package_id
pstatus.status = status_id
db.session.add(pstatus)
def clear_revisions():
with db.session.begin():
for pkg in models.Package.query.filter(
models.Package.package_revision_id!=None,
models.Package.active == True
).all():
pkg.package_revision_id = None
db.session.add(pkg)
def packages_from_registry(registry_url):
offset = 0
while True:
data = urllib2.urlopen(registry_url % (offset), timeout=60).read()
print (registry_url % (offset))
data = json.loads(data)
if len(data["results"]) < 1:
break
for pkg in data["results"]:
yield pkg
offset += 1000
<commit_msg>Adjust for new IATI Updates API; don't have to page through results<commit_after>
# IATI Data Quality, tools for Data QA on IATI-formatted publications
# by Mark Brough, Martin Keegan, Ben Webb and Jennifer Smith
#
# Copyright (C) 2013 Publish What You Fund
#
# This programme is free software; you may redistribute and/or modify
# it under the terms of the GNU Affero General Public License v3.0
import json
import urllib2
from iatidq import db
import models
def add_test_status(package_id, status_id):
with db.session.begin():
pstatus = models.PackageStatus()
pstatus.package_id = package_id
pstatus.status = status_id
db.session.add(pstatus)
def clear_revisions():
with db.session.begin():
for pkg in models.Package.query.filter(
models.Package.package_revision_id!=None,
models.Package.active == True
).all():
pkg.package_revision_id = None
db.session.add(pkg)
def packages_from_registry(registry_url):
#offset = 0
#while True:
#data = urllib2.urlopen(registry_url % (offset), timeout=60).read()
#print (registry_url % (offset))
data = urllib2.urlopen(registry_url, timeout=60).read()
print registry_url
data = json.loads(data)
#if len(data["results"]) < 1:
# break
for pkg in data["data"]:
yield pkg
#offset += 1000
|
c9f4c6442c250d8daecb7476bb1a22dae53f3859 | blockbuster/__init__.py | blockbuster/__init__.py | __author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
app = Flask(__name__)
def startup():
import blockbuster.bb_dbconnector_factory
import blockbuster.bb_logging as log
import blockbuster.bb_auditlogger as audit
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" if not blockbuster.config.timerestriction else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError("Incorrect database schema version. Wanted ")
except RuntimeError, e:
log.logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
startup()
| __author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
import logging
logger = logging.getLogger(__name__)
app = Flask(__name__)
import blockbuster.bb_auditlogger as audit
def startup():
import blockbuster.bb_dbconnector_factory
blockbuster.app.debug = blockbuster.config.debug_mode
print(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" \
if not blockbuster.config.timerestriction \
else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError(str.format("Incorrect database schema version. Wanted {0}", target_schema_version))
except RuntimeError, e:
logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
raise
startup()
| Change logging to use standard logging library. | Change logging to use standard logging library.
| Python | mit | mattstibbs/blockbuster-server,mattstibbs/blockbuster-server | __author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
app = Flask(__name__)
def startup():
import blockbuster.bb_dbconnector_factory
import blockbuster.bb_logging as log
import blockbuster.bb_auditlogger as audit
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" if not blockbuster.config.timerestriction else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError("Incorrect database schema version. Wanted ")
except RuntimeError, e:
log.logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
startup()
Change logging to use standard logging library. | __author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
import logging
logger = logging.getLogger(__name__)
app = Flask(__name__)
import blockbuster.bb_auditlogger as audit
def startup():
import blockbuster.bb_dbconnector_factory
blockbuster.app.debug = blockbuster.config.debug_mode
print(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" \
if not blockbuster.config.timerestriction \
else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError(str.format("Incorrect database schema version. Wanted {0}", target_schema_version))
except RuntimeError, e:
logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
raise
startup()
| <commit_before>__author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
app = Flask(__name__)
def startup():
import blockbuster.bb_dbconnector_factory
import blockbuster.bb_logging as log
import blockbuster.bb_auditlogger as audit
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" if not blockbuster.config.timerestriction else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError("Incorrect database schema version. Wanted ")
except RuntimeError, e:
log.logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
startup()
<commit_msg>Change logging to use standard logging library.<commit_after> | __author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
import logging
logger = logging.getLogger(__name__)
app = Flask(__name__)
import blockbuster.bb_auditlogger as audit
def startup():
import blockbuster.bb_dbconnector_factory
blockbuster.app.debug = blockbuster.config.debug_mode
print(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" \
if not blockbuster.config.timerestriction \
else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError(str.format("Incorrect database schema version. Wanted {0}", target_schema_version))
except RuntimeError, e:
logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
raise
startup()
| __author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
app = Flask(__name__)
def startup():
import blockbuster.bb_dbconnector_factory
import blockbuster.bb_logging as log
import blockbuster.bb_auditlogger as audit
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" if not blockbuster.config.timerestriction else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError("Incorrect database schema version. Wanted ")
except RuntimeError, e:
log.logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
startup()
Change logging to use standard logging library.__author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
import logging
logger = logging.getLogger(__name__)
app = Flask(__name__)
import blockbuster.bb_auditlogger as audit
def startup():
import blockbuster.bb_dbconnector_factory
blockbuster.app.debug = blockbuster.config.debug_mode
print(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" \
if not blockbuster.config.timerestriction \
else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError(str.format("Incorrect database schema version. Wanted {0}", target_schema_version))
except RuntimeError, e:
logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
raise
startup()
| <commit_before>__author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
app = Flask(__name__)
def startup():
import blockbuster.bb_dbconnector_factory
import blockbuster.bb_logging as log
import blockbuster.bb_auditlogger as audit
blockbuster.app.debug = blockbuster.config.debug_mode
blockbuster.bb_logging.logger.info(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" if not blockbuster.config.timerestriction else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError("Incorrect database schema version. Wanted ")
except RuntimeError, e:
log.logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
startup()
<commit_msg>Change logging to use standard logging library.<commit_after>__author__ = 'Matt Stibbs'
__version__ = '1.27.00'
target_schema_version = '1.25.00'
from flask import Flask
import logging
logger = logging.getLogger(__name__)
app = Flask(__name__)
import blockbuster.bb_auditlogger as audit
def startup():
import blockbuster.bb_dbconnector_factory
blockbuster.app.debug = blockbuster.config.debug_mode
print(str.format("Application Startup - BlockBuster v{0} Schema v{1}",
blockbuster.__version__, target_schema_version))
time_setting = "Application Setting - Time Restriction Disabled" \
if not blockbuster.config.timerestriction \
else "Application Setting - Time Restriction Enabled"
print(time_setting)
if blockbuster.config.debug_mode:
print("========= APPLICATION IS RUNNING IN DEBUG MODE ==========")
try:
if blockbuster.bb_dbconnector_factory.DBConnectorInterfaceFactory().create().db_version_check():
import blockbuster.bb_routes
print("Running...")
else:
raise RuntimeError(str.format("Incorrect database schema version. Wanted {0}", target_schema_version))
except RuntimeError, e:
logger.exception(e)
audit.BBAuditLoggerFactory().create().logException('app', 'STARTUP', str(e))
raise
startup()
|
df45251622e6b935b27022e36fcbd79e9228f989 | bonobo/commands/init.py | bonobo/commands/init.py | import os
def execute(name, branch, overwrite_if_exists=False):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
if os.listdir(os.getcwd()) == []:
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
| import os
def execute(name, branch):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
overwrite_if_exists = False
project_path = os.path.join(os.getcwd(), name)
if os.path.isdir(project_path) and not os.listdir(project_path):
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
| Check if target directory is empty instead of current directory and remove overwrite_if_exists argument | Check if target directory is empty instead of current directory and remove overwrite_if_exists argument
| Python | apache-2.0 | hartym/bonobo,python-bonobo/bonobo,hartym/bonobo,hartym/bonobo,python-bonobo/bonobo,python-bonobo/bonobo | import os
def execute(name, branch, overwrite_if_exists=False):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
if os.listdir(os.getcwd()) == []:
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
Check if target directory is empty instead of current directory and remove overwrite_if_exists argument | import os
def execute(name, branch):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
overwrite_if_exists = False
project_path = os.path.join(os.getcwd(), name)
if os.path.isdir(project_path) and not os.listdir(project_path):
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
| <commit_before>import os
def execute(name, branch, overwrite_if_exists=False):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
if os.listdir(os.getcwd()) == []:
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
<commit_msg>Check if target directory is empty instead of current directory and remove overwrite_if_exists argument<commit_after> | import os
def execute(name, branch):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
overwrite_if_exists = False
project_path = os.path.join(os.getcwd(), name)
if os.path.isdir(project_path) and not os.listdir(project_path):
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
| import os
def execute(name, branch, overwrite_if_exists=False):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
if os.listdir(os.getcwd()) == []:
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
Check if target directory is empty instead of current directory and remove overwrite_if_exists argumentimport os
def execute(name, branch):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
overwrite_if_exists = False
project_path = os.path.join(os.getcwd(), name)
if os.path.isdir(project_path) and not os.listdir(project_path):
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
| <commit_before>import os
def execute(name, branch, overwrite_if_exists=False):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
if os.listdir(os.getcwd()) == []:
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
<commit_msg>Check if target directory is empty instead of current directory and remove overwrite_if_exists argument<commit_after>import os
def execute(name, branch):
try:
from cookiecutter.main import cookiecutter
except ImportError as exc:
raise ImportError(
'You must install "cookiecutter" to use this command.\n\n $ pip install cookiecutter\n'
) from exc
overwrite_if_exists = False
project_path = os.path.join(os.getcwd(), name)
if os.path.isdir(project_path) and not os.listdir(project_path):
overwrite_if_exists = True
return cookiecutter(
'https://github.com/python-bonobo/cookiecutter-bonobo.git',
extra_context={'name': name},
no_input=True,
checkout=branch,
overwrite_if_exists=overwrite_if_exists
)
def register(parser):
parser.add_argument('name')
parser.add_argument('--branch', '-b', default='master')
return execute
|
c8a2647424a24ea97c1e5ed5a14c85f8a8eadd8b | email_tools.py | email_tools.py | from contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
msg = MIMEMultipart()
recipients = [Settings.EMAIL_RECIPIENT]
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
| from contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
if Settings.EMAIL_RECIPIENT is None:
raise Exception('EMAIL_RECIPIENT env variable must be set')
recipients = [Settings.EMAIL_RECIPIENT]
msg = MIMEMultipart()
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
| Raise exception is EMAIL_RECIPIENT not set | Raise exception is EMAIL_RECIPIENT not set
| Python | mit | achauve/pycurator | from contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
msg = MIMEMultipart()
recipients = [Settings.EMAIL_RECIPIENT]
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
Raise exception is EMAIL_RECIPIENT not set | from contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
if Settings.EMAIL_RECIPIENT is None:
raise Exception('EMAIL_RECIPIENT env variable must be set')
recipients = [Settings.EMAIL_RECIPIENT]
msg = MIMEMultipart()
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
| <commit_before>from contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
msg = MIMEMultipart()
recipients = [Settings.EMAIL_RECIPIENT]
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
<commit_msg>Raise exception is EMAIL_RECIPIENT not set<commit_after> | from contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
if Settings.EMAIL_RECIPIENT is None:
raise Exception('EMAIL_RECIPIENT env variable must be set')
recipients = [Settings.EMAIL_RECIPIENT]
msg = MIMEMultipart()
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
| from contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
msg = MIMEMultipart()
recipients = [Settings.EMAIL_RECIPIENT]
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
Raise exception is EMAIL_RECIPIENT not setfrom contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
if Settings.EMAIL_RECIPIENT is None:
raise Exception('EMAIL_RECIPIENT env variable must be set')
recipients = [Settings.EMAIL_RECIPIENT]
msg = MIMEMultipart()
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
| <commit_before>from contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
msg = MIMEMultipart()
recipients = [Settings.EMAIL_RECIPIENT]
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
<commit_msg>Raise exception is EMAIL_RECIPIENT not set<commit_after>from contextlib import contextmanager
import logging
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from settings import Settings
@contextmanager
def smtp_server():
s = smtplib.SMTP(Settings.SMTP_HOST, Settings.SMTP_PORT)
s.starttls()
s.login(Settings.SMTP_USERNAME, Settings.SMTP_PASSWORD)
yield s
s.quit()
def send_email(s, sender, msg_subject, html_content):
if Settings.EMAIL_RECIPIENT is None:
raise Exception('EMAIL_RECIPIENT env variable must be set')
recipients = [Settings.EMAIL_RECIPIENT]
msg = MIMEMultipart()
msg['Subject'] = msg_subject
msg['From'] = sender
msg['To'] = ', '.join(recipients)
part_html = MIMEText(html_content, 'html', 'utf-8')
msg.attach(part_html)
try:
if not Settings.DRY_RUN:
s.sendmail(msg['From'], recipients, msg.as_string())
logging.info('sent email to "%s" about "%s"' % (recipients, msg_subject))
except Exception as e:
logging.error('An error occurred while sending email: %s - %s' % (e.__class__, e))
logging.debug(u'email:\n%s' % html_content)
raise
|
c4bf617dddd15e77974b000e8fa90750e1761386 | siteconfig/__init__.py | siteconfig/__init__.py | from .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
| from .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
get_bool = config.get_bool
| Add get_bool to package exports | Add get_bool to package exports
| Python | bsd-3-clause | mikeboers/siteconfig,mikeboers/siteconfig | from .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
Add get_bool to package exports | from .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
get_bool = config.get_bool
| <commit_before>from .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
<commit_msg>Add get_bool to package exports<commit_after> | from .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
get_bool = config.get_bool
| from .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
Add get_bool to package exportsfrom .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
get_bool = config.get_bool
| <commit_before>from .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
<commit_msg>Add get_bool to package exports<commit_after>from .configobj import Config
config = Config.from_environ()
# Add the data and some of the API as attributes of the top-level package.
globals().update(config)
get = config.get
get_bool = config.get_bool
|
4c71ba23720001d06d519a7828f2866814f1c46a | tests/conftest.py | tests/conftest.py | # Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
def functionEvent(self, event):
pass
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
| # Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
from UM.Signal import Signal
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
Signal._app = self
def functionEvent(self, event):
event.call()
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
| Make sure to set the test application instance as app for Signals | Make sure to set the test application instance as app for Signals
This makes singals be properly emitted in tests
| Python | agpl-3.0 | onitake/Uranium,onitake/Uranium | # Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
def functionEvent(self, event):
pass
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
Make sure to set the test application instance as app for Signals
This makes singals be properly emitted in tests | # Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
from UM.Signal import Signal
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
Signal._app = self
def functionEvent(self, event):
event.call()
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
| <commit_before># Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
def functionEvent(self, event):
pass
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
<commit_msg>Make sure to set the test application instance as app for Signals
This makes singals be properly emitted in tests<commit_after> | # Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
from UM.Signal import Signal
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
Signal._app = self
def functionEvent(self, event):
event.call()
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
| # Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
def functionEvent(self, event):
pass
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
Make sure to set the test application instance as app for Signals
This makes singals be properly emitted in tests# Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
from UM.Signal import Signal
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
Signal._app = self
def functionEvent(self, event):
event.call()
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
| <commit_before># Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
def functionEvent(self, event):
pass
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
<commit_msg>Make sure to set the test application instance as app for Signals
This makes singals be properly emitted in tests<commit_after># Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
import pytest
from UM.Application import Application
from UM.Signal import Signal
class FixtureApplication(Application):
def __init__(self):
Application._instance = None
super().__init__("test", "1.0")
Signal._app = self
def functionEvent(self, event):
event.call()
def parseCommandLine(self):
pass
@pytest.fixture()
def application():
return FixtureApplication()
|
14917a4d503569147277bfd5fefa4b2600dfea40 | tests/conftest.py | tests/conftest.py | import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if tags_marker and tags.isdisjoint(set(tags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
| import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
elif (
tags_marker and tags.isdisjoint(set(tags_marker.args)) or
skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args))
):
pytest.skip('skipped for this tags: {}'.format(tags))
| Set xfailtags as first priority | Set xfailtags as first priority
| Python | mit | dincamihai/salt-toaster,dincamihai/salt-toaster | import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if tags_marker and tags.isdisjoint(set(tags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
Set xfailtags as first priority | import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
elif (
tags_marker and tags.isdisjoint(set(tags_marker.args)) or
skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args))
):
pytest.skip('skipped for this tags: {}'.format(tags))
| <commit_before>import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if tags_marker and tags.isdisjoint(set(tags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
<commit_msg>Set xfailtags as first priority<commit_after> | import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
elif (
tags_marker and tags.isdisjoint(set(tags_marker.args)) or
skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args))
):
pytest.skip('skipped for this tags: {}'.format(tags))
| import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if tags_marker and tags.isdisjoint(set(tags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
Set xfailtags as first priorityimport pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
elif (
tags_marker and tags.isdisjoint(set(tags_marker.args)) or
skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args))
):
pytest.skip('skipped for this tags: {}'.format(tags))
| <commit_before>import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if tags_marker and tags.isdisjoint(set(tags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args)):
pytest.skip('skipped for this tags: {}'.format(tags))
elif xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
<commit_msg>Set xfailtags as first priority<commit_after>import pytest
@pytest.fixture(autouse=True)
def tagschecker(request):
tags = set(request.config.getini('TAGS'))
tags_marker = request.node.get_marker('tags')
xfailtags_marker = request.node.get_marker('xfailtags')
skiptags_marker = request.node.get_marker('skiptags')
if xfailtags_marker and not tags.isdisjoint(set(xfailtags_marker.args)):
request.node.add_marker(pytest.mark.xfail())
elif (
tags_marker and tags.isdisjoint(set(tags_marker.args)) or
skiptags_marker and not tags.isdisjoint(set(skiptags_marker.args))
):
pytest.skip('skipped for this tags: {}'.format(tags))
|
eacfca844e5ab590acfcd193e2ca1fa379e10009 | alg_strongly_connected_components.py | alg_strongly_connected_components.py | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of finish times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _previsit():
pass
def _postvisit():
pass
def dfs():
pass
def _transpose_graph():
pass
def _inverse_postvisit_vertex():
pass
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of postvisit times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
| Add strongly connected components's methods | Add strongly connected components's methods
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of finish times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
Add strongly connected components's methods | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _previsit():
pass
def _postvisit():
pass
def dfs():
pass
def _transpose_graph():
pass
def _inverse_postvisit_vertex():
pass
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of postvisit times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
| <commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of finish times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
<commit_msg>Add strongly connected components's methods<commit_after> | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _previsit():
pass
def _postvisit():
pass
def dfs():
pass
def _transpose_graph():
pass
def _inverse_postvisit_vertex():
pass
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of postvisit times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of finish times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
Add strongly connected components's methodsfrom __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _previsit():
pass
def _postvisit():
pass
def dfs():
pass
def _transpose_graph():
pass
def _inverse_postvisit_vertex():
pass
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of postvisit times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
| <commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of finish times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
<commit_msg>Add strongly connected components's methods<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _previsit():
pass
def _postvisit():
pass
def dfs():
pass
def _transpose_graph():
pass
def _inverse_postvisit_vertex():
pass
def strongly_connected_components():
"""Strongly connected components for graph.
Procedure:
- Call (Depth First Search) DFS on graph G to
compute finish times for each vertex.
- Compute the transpose graph G^T of graph G.
- Call DFS on G^T, but in the main loop of DFS,
feed the vertex in the decreasing order of postvisit times.
- Outpu the vertices of each tree in the DFS forest as
separate strongly connected components.
"""
pass
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adj_dict = {
'A': ['B'],
'B': ['C', 'E'],
'C': ['C', 'F'],
'D': ['B', 'G'],
'E': ['A', 'D'],
'F': ['H'],
'G': ['E'],
'H': ['I'],
'I': ['F']
}
strongly_connected_components(adj_dict)
if __name__ == '__main__':
main()
|
150058856f10992d0f65f47d79ac14e2f52818cc | cellcounter/urls.py | cellcounter/urls.py | from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| Add URL to enable user list view | Add URL to enable user list view
| Python | mit | cellcounter/cellcounter,oghm2/hackdayoxford,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcountr,haematologic/cellcounter,cellcounter/cellcounter,haematologic/cellcountr,haematologic/cellcounter,haematologic/cellcounter,oghm2/hackdayoxford | from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
Add URL to enable user list view | from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| <commit_before>from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Add URL to enable user list view<commit_after> | from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
Add URL to enable user list viewfrom django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| <commit_before>from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Add URL to enable user list view<commit_after>from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
|
c18bdb90e6197e298cc6bfb5f52ffbf1d48f029e | paramrunner.py | paramrunner.py | # -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params-results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model() | # -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params_results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model() | Change name of results array. | Change name of results array.
| Python | bsd-3-clause | ihuston/pyflation,ihuston/pyflation | # -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params-results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model()Change name of results array. | # -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params_results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model() | <commit_before># -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params-results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model()<commit_msg>Change name of results array.<commit_after> | # -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params_results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model() | # -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params-results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model()Change name of results array.# -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params_results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model() | <commit_before># -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params-results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model()<commit_msg>Change name of results array.<commit_after># -*- coding: utf-8 -*-
# paramrunner.py
# Run findparams on linde model and save results
import helpers
import findparams
import tables
import numpy as np
savedir = "/home/ith/results/"
savefile = "linde-params.hf5"
WMAP5PIVOT = np.array([5.25e-60])
lindefx = {"vars":["mass","lambda"], "values": [np.linspace(4.9e-8,6e-8), np.linspace(5e-14, 1.6e-13)],
"pivotk":WMAP5PIVOT, "pot": "linde",
"ystart": np.array([25.0,
-1.0,
0.0,
1.0,
0,
1.0,
0])}
def run_and_save_model(sf=None, fx=None, sd=None):
"""Run linde model and save results."""
if sf is None:
sf = savefile
if sd is None:
sd = savedir
helpers.ensurepath(sd)
if fx is None:
fx = lindefx
results = findparams.param_vs_spectrum(fx)
try:
rfile = tables.openFile(sd + sf, "w")
rfile.createArray(rfile.root, "params_results", results, "Model parameter search results")
print "Results saved in %s" % str(sd + sf)
finally:
rfile.close()
if __name__ == "__main__":
run_linde_model() |
705b93f7fd688c4889562a9950c220db23ffa98a | tomso/__init__.py | tomso/__init__.py | __version__ = "0.0.12"
__all__ = [
'adipls',
'utils',
'constants',
'gyre',
'fgong',
'mesa',
'stars'
]
| __version__ = "0.0.12"
__all__ = [
'adipls',
'constants',
'fgong',
'gyre',
'mesa',
'stars',
'utils'
]
| Put modules in alphabetical order | Put modules in alphabetical order
| Python | mit | warrickball/tomso | __version__ = "0.0.12"
__all__ = [
'adipls',
'utils',
'constants',
'gyre',
'fgong',
'mesa',
'stars'
]
Put modules in alphabetical order | __version__ = "0.0.12"
__all__ = [
'adipls',
'constants',
'fgong',
'gyre',
'mesa',
'stars',
'utils'
]
| <commit_before>__version__ = "0.0.12"
__all__ = [
'adipls',
'utils',
'constants',
'gyre',
'fgong',
'mesa',
'stars'
]
<commit_msg>Put modules in alphabetical order<commit_after> | __version__ = "0.0.12"
__all__ = [
'adipls',
'constants',
'fgong',
'gyre',
'mesa',
'stars',
'utils'
]
| __version__ = "0.0.12"
__all__ = [
'adipls',
'utils',
'constants',
'gyre',
'fgong',
'mesa',
'stars'
]
Put modules in alphabetical order__version__ = "0.0.12"
__all__ = [
'adipls',
'constants',
'fgong',
'gyre',
'mesa',
'stars',
'utils'
]
| <commit_before>__version__ = "0.0.12"
__all__ = [
'adipls',
'utils',
'constants',
'gyre',
'fgong',
'mesa',
'stars'
]
<commit_msg>Put modules in alphabetical order<commit_after>__version__ = "0.0.12"
__all__ = [
'adipls',
'constants',
'fgong',
'gyre',
'mesa',
'stars',
'utils'
]
|
594f6b189f496ec01c8c6742cde8639689dd50cc | tomso/__init__.py | tomso/__init__.py | __version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'io',
'mesa',
'stars'
]
| __version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'fgong',
'mesa',
'stars'
]
| Fix list of submodules so that from tomso import * works | Fix list of submodules so that from tomso import * works
| Python | mit | warrickball/tomso | __version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'io',
'mesa',
'stars'
]
Fix list of submodules so that from tomso import * works | __version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'fgong',
'mesa',
'stars'
]
| <commit_before>__version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'io',
'mesa',
'stars'
]
<commit_msg>Fix list of submodules so that from tomso import * works<commit_after> | __version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'fgong',
'mesa',
'stars'
]
| __version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'io',
'mesa',
'stars'
]
Fix list of submodules so that from tomso import * works__version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'fgong',
'mesa',
'stars'
]
| <commit_before>__version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'io',
'mesa',
'stars'
]
<commit_msg>Fix list of submodules so that from tomso import * works<commit_after>__version__ = "0.0.11a"
__all__ = [
'adipls',
'common',
'gyre',
'fgong',
'mesa',
'stars'
]
|
72455eeeab41810e49a182422c41a4c6e315055e | build.py | build.py | import os.path
from buildlib import *
CONFIGURATION = 'Debug'
project = Project(__file__, 'build')
project.version = '0.1.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Release'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
| import os.path
from buildlib import *
project = Project(__file__, 'build')
project.version = '0.0.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Debug'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
| Build as 0.0.0-debug for now. | Build as 0.0.0-debug for now.
| Python | mit | jammycakes/dolstagis.web,jammycakes/dolstagis.web,jammycakes/dolstagis.web | import os.path
from buildlib import *
CONFIGURATION = 'Debug'
project = Project(__file__, 'build')
project.version = '0.1.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Release'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
Build as 0.0.0-debug for now. | import os.path
from buildlib import *
project = Project(__file__, 'build')
project.version = '0.0.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Debug'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
| <commit_before>import os.path
from buildlib import *
CONFIGURATION = 'Debug'
project = Project(__file__, 'build')
project.version = '0.1.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Release'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
<commit_msg>Build as 0.0.0-debug for now.<commit_after> | import os.path
from buildlib import *
project = Project(__file__, 'build')
project.version = '0.0.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Debug'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
| import os.path
from buildlib import *
CONFIGURATION = 'Debug'
project = Project(__file__, 'build')
project.version = '0.1.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Release'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
Build as 0.0.0-debug for now.import os.path
from buildlib import *
project = Project(__file__, 'build')
project.version = '0.0.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Debug'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
| <commit_before>import os.path
from buildlib import *
CONFIGURATION = 'Debug'
project = Project(__file__, 'build')
project.version = '0.1.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Release'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
<commit_msg>Build as 0.0.0-debug for now.<commit_after>import os.path
from buildlib import *
project = Project(__file__, 'build')
project.version = '0.0.0'
project.versioninfo = 'alpha'
project.build_number = 0
project.configuration = 'Debug'
project.start()
project.clean()
project.write_version('src/.version/VersionInfo.cs')
project.msbuild('src/Dolstagis.Web.sln', 'Clean', 'Build', Platform='Any CPU')
project.nunit('src/Dolstagis.Tests/Dolstagis.Tests.nunit')
project.make_nugets(
'Dolstagis.Web',
'Dolstagis.Web.Aspnet',
# 'Dolstagis.Web.Owin',
'Dolstagis.Web.Views.Nustache'
)
|
8445c6bc549285fea5313a72c0500e2240460332 | avalonstar/apps/subscribers/admin.py | avalonstar/apps/subscribers/admin.py | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['created', 'updated', 'is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
| Add created to list_display; make it editable. | Add created to list_display; make it editable.
| Python | apache-2.0 | bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
Add created to list_display; make it editable. | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['created', 'updated', 'is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
| <commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
<commit_msg>Add created to list_display; make it editable.<commit_after> | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['created', 'updated', 'is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
Add created to list_display; make it editable.# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['created', 'updated', 'is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
| <commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
<commit_msg>Add created to list_display; make it editable.<commit_after># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Ticket
class TicketAdmin(admin.ModelAdmin):
list_display = ['name', 'display_name', 'created', 'updated', 'is_active', 'is_paid', 'twid']
list_editable = ['created', 'updated', 'is_active', 'is_paid']
ordering = ['-updated']
admin.site.register(Ticket, TicketAdmin)
|
5af67c1bf24768e5d2fe573198866af7d3e766de | lms/djangoapps/heartbeat/views.py | lms/djangoapps/heartbeat/views.py | import json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
| import json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location.url() for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
| Put course location urls in heartbeat page, rather than lists | Put course location urls in heartbeat page, rather than lists
| Python | agpl-3.0 | edx-solutions/edx-platform,beni55/edx-platform,arifsetiawan/edx-platform,lduarte1991/edx-platform,dsajkl/123,J861449197/edx-platform,ampax/edx-platform-backup,motion2015/edx-platform,alexthered/kienhoc-platform,cyanna/edx-platform,ZLLab-Mooc/edx-platform,motion2015/a3,amir-qayyum-khan/edx-platform,wwj718/edx-platform,jruiperezv/ANALYSE,hkawasaki/kawasaki-aio8-0,eemirtekin/edx-platform,deepsrijit1105/edx-platform,pabloborrego93/edx-platform,xinjiguaike/edx-platform,Edraak/edx-platform,louyihua/edx-platform,jazkarta/edx-platform,morpheby/levelup-by,MakeHer/edx-platform,etzhou/edx-platform,xuxiao19910803/edx-platform,chand3040/cloud_that,y12uc231/edx-platform,edry/edx-platform,LICEF/edx-platform,DNFcode/edx-platform,mtlchun/edx,zubair-arbi/edx-platform,Unow/edx-platform,raccoongang/edx-platform,kamalx/edx-platform,andyzsf/edx,mushtaqak/edx-platform,defance/edx-platform,nttks/jenkins-test,dsajkl/reqiop,abdoosh00/edx-rtl-final,rationalAgent/edx-platform-custom,auferack08/edx-platform,pdehaye/theming-edx-platform,nttks/edx-platform,shabab12/edx-platform,inares/edx-platform,ahmadio/edx-platform,wwj718/ANALYSE,hkawasaki/kawasaki-aio8-0,bigdatauniversity/edx-platform,shurihell/testasia,UOMx/edx-platform,unicri/edx-platform,ovnicraft/edx-platform,kalebhartje/schoolboost,jelugbo/tundex,cselis86/edx-platform,DNFcode/edx-platform,mjg2203/edx-platform-seas,shashank971/edx-platform,hamzehd/edx-platform,JCBarahona/edX,synergeticsedx/deployment-wipro,ahmadiga/min_edx,mjirayu/sit_academy,miptliot/edx-platform,SivilTaram/edx-platform,polimediaupv/edx-platform,B-MOOC/edx-platform,WatanabeYasumasa/edx-platform,IITBinterns13/edx-platform-dev,edry/edx-platform,apigee/edx-platform,naresh21/synergetics-edx-platform,amir-qayyum-khan/edx-platform,romain-li/edx-platform,franosincic/edx-platform,mcgachey/edx-platform,mcgachey/edx-platform,RPI-OPENEDX/edx-platform,vasyarv/edx-platform,UOMx/edx-platform,vikas1885/test1,kursitet/edx-platform,shabab12/edx-platform,Ayub-Khan/ed
x-platform,sudheerchintala/LearnEraPlatForm,appliedx/edx-platform,Shrhawk/edx-platform,itsjeyd/edx-platform,mushtaqak/edx-platform,shubhdev/edx-platform,ahmadio/edx-platform,gsehub/edx-platform,polimediaupv/edx-platform,alu042/edx-platform,shubhdev/openedx,vismartltd/edx-platform,nikolas/edx-platform,jbassen/edx-platform,kursitet/edx-platform,unicri/edx-platform,stvstnfrd/edx-platform,Edraak/circleci-edx-platform,chrisndodge/edx-platform,jamiefolsom/edx-platform,pepeportela/edx-platform,kmoocdev2/edx-platform,xingyepei/edx-platform,unicri/edx-platform,deepsrijit1105/edx-platform,fintech-circle/edx-platform,mtlchun/edx,jswope00/griffinx,angelapper/edx-platform,jbzdak/edx-platform,LearnEra/LearnEraPlaftform,nanolearning/edx-platform,jbzdak/edx-platform,chrisndodge/edx-platform,etzhou/edx-platform,JioEducation/edx-platform,pdehaye/theming-edx-platform,4eek/edx-platform,MSOpenTech/edx-platform,openfun/edx-platform,Kalyzee/edx-platform,a-parhom/edx-platform,jolyonb/edx-platform,iivic/BoiseStateX,openfun/edx-platform,Lektorium-LLC/edx-platform,philanthropy-u/edx-platform,J861449197/edx-platform,devs1991/test_edx_docmode,cecep-edu/edx-platform,franosincic/edx-platform,ahmadio/edx-platform,nagyistoce/edx-platform,pabloborrego93/edx-platform,xuxiao19910803/edx,kamalx/edx-platform,Semi-global/edx-platform,philanthropy-u/edx-platform,atsolakid/edx-platform,pepeportela/edx-platform,abdoosh00/edx-rtl-final,IONISx/edx-platform,shabab12/edx-platform,procangroup/edx-platform,rue89-tech/edx-platform,cpennington/edx-platform,a-parhom/edx-platform,Unow/edx-platform,jamiefolsom/edx-platform,edx-solutions/edx-platform,longmen21/edx-platform,kalebhartje/schoolboost,devs1991/test_edx_docmode,don-github/edx-platform,MakeHer/edx-platform,alexthered/kienhoc-platform,EduPepperPD/pepper2013,kxliugang/edx-platform,SivilTaram/edx-platform,DefyVentures/edx-platform,vismartltd/edx-platform,4eek/edx-platform,morpheby/levelup-by,TeachAtTUM/edx-platform,don-github/edx-platform,ahmadiga/min_edx,antonv
e/s4-project-mooc,LearnEra/LearnEraPlaftform,jelugbo/tundex,wwj718/edx-platform,jolyonb/edx-platform,xuxiao19910803/edx,nanolearningllc/edx-platform-cypress,beacloudgenius/edx-platform,xingyepei/edx-platform,longmen21/edx-platform,shubhdev/openedx,analyseuc3m/ANALYSE-v1,zofuthan/edx-platform,ahmedaljazzar/edx-platform,adoosii/edx-platform,Softmotions/edx-platform,wwj718/edx-platform,chand3040/cloud_that,pelikanchik/edx-platform,torchingloom/edx-platform,Kalyzee/edx-platform,jswope00/GAI,Livit/Livit.Learn.EdX,gymnasium/edx-platform,jjmiranda/edx-platform,shubhdev/edx-platform,beacloudgenius/edx-platform,rhndg/openedx,louyihua/edx-platform,devs1991/test_edx_docmode,zhenzhai/edx-platform,nttks/edx-platform,synergeticsedx/deployment-wipro,praveen-pal/edx-platform,fly19890211/edx-platform,beni55/edx-platform,deepsrijit1105/edx-platform,Kalyzee/edx-platform,amir-qayyum-khan/edx-platform,mbareta/edx-platform-ft,playm2mboy/edx-platform,xuxiao19910803/edx-platform,arifsetiawan/edx-platform,zubair-arbi/edx-platform,nanolearningllc/edx-platform-cypress-2,devs1991/test_edx_docmode,rhndg/openedx,mahendra-r/edx-platform,shubhdev/openedx,mitocw/edx-platform,EDUlib/edx-platform,itsjeyd/edx-platform,jelugbo/tundex,andyzsf/edx,Lektorium-LLC/edx-platform,jolyonb/edx-platform,jswope00/griffinx,syjeon/new_edx,nagyistoce/edx-platform,simbs/edx-platform,naresh21/synergetics-edx-platform,mcgachey/edx-platform,ampax/edx-platform,ahmadiga/min_edx,fintech-circle/edx-platform,shubhdev/edx-platform,eduNEXT/edx-platform,Stanford-Online/edx-platform,iivic/BoiseStateX,bitifirefly/edx-platform,ESOedX/edx-platform,abdoosh00/edraak,Livit/Livit.Learn.EdX,bdero/edx-platform,adoosii/edx-platform,praveen-pal/edx-platform,mushtaqak/edx-platform,hamzehd/edx-platform,jbzdak/edx-platform,abdoosh00/edx-rtl-final,polimediaupv/edx-platform,mushtaqak/edx-platform,CourseTalk/edx-platform,chauhanhardik/populo_2,shubhdev/edxOnBaadal,zerobatu/edx-platform,ak2703/edx-platform,caesar2164/edx-platform,yokose-ks/edx-pla
tform,mbareta/edx-platform-ft,fintech-circle/edx-platform,praveen-pal/edx-platform,chand3040/cloud_that,ubc/edx-platform,nikolas/edx-platform,jazkarta/edx-platform-for-isc,ferabra/edx-platform,miptliot/edx-platform,nanolearningllc/edx-platform-cypress,kmoocdev2/edx-platform,ampax/edx-platform,carsongee/edx-platform,dkarakats/edx-platform,shubhdev/openedx,cecep-edu/edx-platform,mitocw/edx-platform,cognitiveclass/edx-platform,JCBarahona/edX,martynovp/edx-platform,appliedx/edx-platform,dkarakats/edx-platform,Edraak/edraak-platform,jruiperezv/ANALYSE,zhenzhai/edx-platform,romain-li/edx-platform,doismellburning/edx-platform,bigdatauniversity/edx-platform,Edraak/circleci-edx-platform,peterm-itr/edx-platform,bigdatauniversity/edx-platform,kamalx/edx-platform,martynovp/edx-platform,jazkarta/edx-platform,eestay/edx-platform,alu042/edx-platform,beacloudgenius/edx-platform,simbs/edx-platform,halvertoluke/edx-platform,LICEF/edx-platform,JioEducation/edx-platform,wwj718/edx-platform,Stanford-Online/edx-platform,analyseuc3m/ANALYSE-v1,franosincic/edx-platform,LICEF/edx-platform,peterm-itr/edx-platform,ampax/edx-platform,EduPepperPDTesting/pepper2013-testing,Ayub-Khan/edx-platform,gymnasium/edx-platform,vasyarv/edx-platform,hkawasaki/kawasaki-aio8-2,raccoongang/edx-platform,jswope00/GAI,nanolearningllc/edx-platform-cypress,solashirai/edx-platform,rismalrv/edx-platform,tiagochiavericosta/edx-platform,vismartltd/edx-platform,eemirtekin/edx-platform,nanolearningllc/edx-platform-cypress,teltek/edx-platform,philanthropy-u/edx-platform,DNFcode/edx-platform,Endika/edx-platform,UOMx/edx-platform,arbrandes/edx-platform,hkawasaki/kawasaki-aio8-1,nikolas/edx-platform,playm2mboy/edx-platform,chauhanhardik/populo,mjirayu/sit_academy,naresh21/synergetics-edx-platform,pelikanchik/edx-platform,vikas1885/test1,SravanthiSinha/edx-platform,tiagochiavericosta/edx-platform,jamesblunt/edx-platform,beacloudgenius/edx-platform,dcosentino/edx-platform,jruiperezv/ANALYSE,defance/edx-platform,kxliugang/edx-
platform,dcosentino/edx-platform,atsolakid/edx-platform,Shrhawk/edx-platform,knehez/edx-platform,don-github/edx-platform,SravanthiSinha/edx-platform,jazkarta/edx-platform-for-isc,dsajkl/reqiop,MakeHer/edx-platform,CredoReference/edx-platform,nttks/jenkins-test,ZLLab-Mooc/edx-platform,don-github/edx-platform,appsembler/edx-platform,pku9104038/edx-platform,cpennington/edx-platform,rationalAgent/edx-platform-custom,nanolearningllc/edx-platform-cypress-2,jamesblunt/edx-platform,kmoocdev2/edx-platform,jjmiranda/edx-platform,EduPepperPD/pepper2013,jzoldak/edx-platform,defance/edx-platform,bitifirefly/edx-platform,EDUlib/edx-platform,halvertoluke/edx-platform,longmen21/edx-platform,tiagochiavericosta/edx-platform,playm2mboy/edx-platform,cecep-edu/edx-platform,doganov/edx-platform,synergeticsedx/deployment-wipro,prarthitm/edxplatform,kalebhartje/schoolboost,tanmaykm/edx-platform,raccoongang/edx-platform,etzhou/edx-platform,Edraak/edraak-platform,mahendra-r/edx-platform,motion2015/edx-platform,Softmotions/edx-platform,auferack08/edx-platform,pelikanchik/edx-platform,ampax/edx-platform-backup,SivilTaram/edx-platform,dsajkl/123,ubc/edx-platform,rue89-tech/edx-platform,antonve/s4-project-mooc,ahmadio/edx-platform,Lektorium-LLC/edx-platform,chudaol/edx-platform,gsehub/edx-platform,marcore/edx-platform,andyzsf/edx,bdero/edx-platform,jbassen/edx-platform,mahendra-r/edx-platform,appsembler/edx-platform,jonathan-beard/edx-platform,RPI-OPENEDX/edx-platform,EduPepperPDTesting/pepper2013-testing,hkawasaki/kawasaki-aio8-1,hmcmooc/muddx-platform,zerobatu/edx-platform,nanolearning/edx-platform,IndonesiaX/edx-platform,AkA84/edx-platform,proversity-org/edx-platform,romain-li/edx-platform,pku9104038/edx-platform,hkawasaki/kawasaki-aio8-2,nagyistoce/edx-platform,rhndg/openedx,mbareta/edx-platform-ft,leansoft/edx-platform,shubhdev/edx-platform,jazztpt/edx-platform,BehavioralInsightsTeam/edx-platform,etzhou/edx-platform,wwj718/ANALYSE,shubhdev/edxOnBaadal,caesar2164/edx-platform,nanolearningllc
/edx-platform-cypress,synergeticsedx/deployment-wipro,MSOpenTech/edx-platform,xingyepei/edx-platform,mtlchun/edx,pku9104038/edx-platform,mjirayu/sit_academy,J861449197/edx-platform,carsongee/edx-platform,fly19890211/edx-platform,EDUlib/edx-platform,waheedahmed/edx-platform,andyzsf/edx,LICEF/edx-platform,jzoldak/edx-platform,4eek/edx-platform,openfun/edx-platform,nagyistoce/edx-platform,syjeon/new_edx,ak2703/edx-platform,rationalAgent/edx-platform-custom,WatanabeYasumasa/edx-platform,PepperPD/edx-pepper-platform,sudheerchintala/LearnEraPlatForm,MSOpenTech/edx-platform,msegado/edx-platform,utecuy/edx-platform,don-github/edx-platform,cselis86/edx-platform,morenopc/edx-platform,JioEducation/edx-platform,motion2015/edx-platform,sudheerchintala/LearnEraPlatForm,hkawasaki/kawasaki-aio8-0,eestay/edx-platform,EduPepperPDTesting/pepper2013-testing,doismellburning/edx-platform,edx/edx-platform,nttks/jenkins-test,Stanford-Online/edx-platform,vikas1885/test1,cselis86/edx-platform,jruiperezv/ANALYSE,msegado/edx-platform,a-parhom/edx-platform,hamzehd/edx-platform,zhenzhai/edx-platform,nanolearning/edx-platform,halvertoluke/edx-platform,IONISx/edx-platform,ak2703/edx-platform,rismalrv/edx-platform,edx-solutions/edx-platform,pelikanchik/edx-platform,jazkarta/edx-platform,chauhanhardik/populo_2,Edraak/edx-platform,beni55/edx-platform,shashank971/edx-platform,mjirayu/sit_academy,shurihell/testasia,gymnasium/edx-platform,CourseTalk/edx-platform,vasyarv/edx-platform,lduarte1991/edx-platform,cyanna/edx-platform,tiagochiavericosta/edx-platform,jzoldak/edx-platform,Stanford-Online/edx-platform,ZLLab-Mooc/edx-platform,tanmaykm/edx-platform,hmcmooc/muddx-platform,valtech-mooc/edx-platform,jamesblunt/edx-platform,pepeportela/edx-platform,kmoocdev2/edx-platform,zadgroup/edx-platform,prarthitm/edxplatform,jazkarta/edx-platform-for-isc,UXE/local-edx,eduNEXT/edx-platform,shubhdev/openedx,eduNEXT/edunext-platform,proversity-org/edx-platform,J861449197/edx-platform,leansoft/edx-platform,gsehub/edx-
platform,xuxiao19910803/edx,CourseTalk/edx-platform,sameetb-cuelogic/edx-platform-test,jamiefolsom/edx-platform,vismartltd/edx-platform,rismalrv/edx-platform,eemirtekin/edx-platform,Edraak/edraak-platform,atsolakid/edx-platform,Kalyzee/edx-platform,nagyistoce/edx-platform,devs1991/test_edx_docmode,Edraak/circleci-edx-platform,motion2015/a3,antonve/s4-project-mooc,kmoocdev2/edx-platform,inares/edx-platform,simbs/edx-platform,JioEducation/edx-platform,procangroup/edx-platform,marcore/edx-platform,RPI-OPENEDX/edx-platform,xinjiguaike/edx-platform,dsajkl/reqiop,pku9104038/edx-platform,xuxiao19910803/edx-platform,eestay/edx-platform,sudheerchintala/LearnEraPlatForm,unicri/edx-platform,a-parhom/edx-platform,xuxiao19910803/edx-platform,EduPepperPD/pepper2013,DefyVentures/edx-platform,zhenzhai/edx-platform,alexthered/kienhoc-platform,olexiim/edx-platform,AkA84/edx-platform,xinjiguaike/edx-platform,jonathan-beard/edx-platform,mitocw/edx-platform,Softmotions/edx-platform,mushtaqak/edx-platform,romain-li/edx-platform,ahmedaljazzar/edx-platform,deepsrijit1105/edx-platform,valtech-mooc/edx-platform,kursitet/edx-platform,angelapper/edx-platform,zubair-arbi/edx-platform,benpatterson/edx-platform,vikas1885/test1,pomegranited/edx-platform,hkawasaki/kawasaki-aio8-2,RPI-OPENEDX/edx-platform,kamalx/edx-platform,IITBinterns13/edx-platform-dev,jamiefolsom/edx-platform,eemirtekin/edx-platform,zofuthan/edx-platform,simbs/edx-platform,MakeHer/edx-platform,olexiim/edx-platform,xinjiguaike/edx-platform,xingyepei/edx-platform,knehez/edx-platform,ferabra/edx-platform,Livit/Livit.Learn.EdX,morenopc/edx-platform,iivic/BoiseStateX,hkawasaki/kawasaki-aio8-1,mjg2203/edx-platform-seas,morenopc/edx-platform,shurihell/testasia,halvertoluke/edx-platform,hmcmooc/muddx-platform,rue89-tech/edx-platform,edx-solutions/edx-platform,knehez/edx-platform,apigee/edx-platform,nanolearning/edx-platform,antonve/s4-project-mooc,WatanabeYasumasa/edx-platform,hkawasaki/kawasaki-aio8-1,edx/edx-platform,shashank971/edx-p
latform,louyihua/edx-platform,nikolas/edx-platform,ahmadiga/min_edx,eduNEXT/edunext-platform,TeachAtTUM/edx-platform,itsjeyd/edx-platform,hastexo/edx-platform,martynovp/edx-platform,jzoldak/edx-platform,EduPepperPDTesting/pepper2013-testing,xinjiguaike/edx-platform,ampax/edx-platform,IndonesiaX/edx-platform,abdoosh00/edraak,antonve/s4-project-mooc,Edraak/edx-platform,polimediaupv/edx-platform,10clouds/edx-platform,yokose-ks/edx-platform,alu042/edx-platform,zadgroup/edx-platform,vismartltd/edx-platform,y12uc231/edx-platform,TeachAtTUM/edx-platform,zofuthan/edx-platform,PepperPD/edx-pepper-platform,playm2mboy/edx-platform,auferack08/edx-platform,ak2703/edx-platform,zerobatu/edx-platform,cselis86/edx-platform,chand3040/cloud_that,mbareta/edx-platform-ft,BehavioralInsightsTeam/edx-platform,SravanthiSinha/edx-platform,waheedahmed/edx-platform,EduPepperPD/pepper2013,Ayub-Khan/edx-platform,cpennington/edx-platform,iivic/BoiseStateX,10clouds/edx-platform,nttks/jenkins-test,fintech-circle/edx-platform,ESOedX/edx-platform,kmoocdev/edx-platform,Endika/edx-platform,UXE/local-edx,ZLLab-Mooc/edx-platform,Semi-global/edx-platform,philanthropy-u/edx-platform,polimediaupv/edx-platform,morenopc/edx-platform,antoviaque/edx-platform,antoviaque/edx-platform,nttks/edx-platform,sameetb-cuelogic/edx-platform-test,shubhdev/edxOnBaadal,prarthitm/edxplatform,leansoft/edx-platform,eduNEXT/edx-platform,doganov/edx-platform,eestay/edx-platform,Lektorium-LLC/edx-platform,apigee/edx-platform,leansoft/edx-platform,Endika/edx-platform,ESOedX/edx-platform,sameetb-cuelogic/edx-platform-test,ovnicraft/edx-platform,proversity-org/edx-platform,zadgroup/edx-platform,kxliugang/edx-platform,tanmaykm/edx-platform,Edraak/edraak-platform,adoosii/edx-platform,nttks/jenkins-test,OmarIthawi/edx-platform,appsembler/edx-platform,appsembler/edx-platform,lduarte1991/edx-platform,ahmadiga/min_edx,10clouds/edx-platform,jazztpt/edx-platform,jbassen/edx-platform,pabloborrego93/edx-platform,etzhou/edx-platform,mcgachey/ed
x-platform,UOMx/edx-platform,chudaol/edx-platform,teltek/edx-platform,kmoocdev/edx-platform,shabab12/edx-platform,EduPepperPDTesting/pepper2013-testing,IndonesiaX/edx-platform,Unow/edx-platform,stvstnfrd/edx-platform,bitifirefly/edx-platform,bigdatauniversity/edx-platform,rationalAgent/edx-platform-custom,simbs/edx-platform,rhndg/openedx,CredoReference/edx-platform,knehez/edx-platform,mahendra-r/edx-platform,B-MOOC/edx-platform,bdero/edx-platform,devs1991/test_edx_docmode,hkawasaki/kawasaki-aio8-2,msegado/edx-platform,procangroup/edx-platform,gsehub/edx-platform,Edraak/circleci-edx-platform,ferabra/edx-platform,waheedahmed/edx-platform,ferabra/edx-platform,inares/edx-platform,kalebhartje/schoolboost,mahendra-r/edx-platform,hamzehd/edx-platform,PepperPD/edx-pepper-platform,chauhanhardik/populo,rue89-tech/edx-platform,zofuthan/edx-platform,wwj718/edx-platform,mitocw/edx-platform,chudaol/edx-platform,JCBarahona/edX,Edraak/edx-platform,TsinghuaX/edx-platform,hastexo/edx-platform,caesar2164/edx-platform,raccoongang/edx-platform,AkA84/edx-platform,olexiim/edx-platform,Shrhawk/edx-platform,mjirayu/sit_academy,devs1991/test_edx_docmode,miptliot/edx-platform,yokose-ks/edx-platform,chudaol/edx-platform,pepeportela/edx-platform,msegado/edx-platform,valtech-mooc/edx-platform,mcgachey/edx-platform,MSOpenTech/edx-platform,peterm-itr/edx-platform,xingyepei/edx-platform,Semi-global/edx-platform,alexthered/kienhoc-platform,utecuy/edx-platform,vikas1885/test1,leansoft/edx-platform,kursitet/edx-platform,zadgroup/edx-platform,ESOedX/edx-platform,B-MOOC/edx-platform,jswope00/GAI,valtech-mooc/edx-platform,fly19890211/edx-platform,Shrhawk/edx-platform,kxliugang/edx-platform,TeachAtTUM/edx-platform,antoviaque/edx-platform,benpatterson/edx-platform,nanolearningllc/edx-platform-cypress-2,motion2015/a3,longmen21/edx-platform,lduarte1991/edx-platform,shubhdev/edxOnBaadal,auferack08/edx-platform,bigdatauniversity/edx-platform,y12uc231/edx-platform,chrisndodge/edx-platform,cselis86/edx-platform,
chrisndodge/edx-platform,rismalrv/edx-platform,ampax/edx-platform-backup,alexthered/kienhoc-platform,eduNEXT/edunext-platform,cognitiveclass/edx-platform,benpatterson/edx-platform,SravanthiSinha/edx-platform,pdehaye/theming-edx-platform,yokose-ks/edx-platform,jazztpt/edx-platform,LearnEra/LearnEraPlaftform,J861449197/edx-platform,jbzdak/edx-platform,carsongee/edx-platform,adoosii/edx-platform,Semi-global/edx-platform,waheedahmed/edx-platform,jolyonb/edx-platform,valtech-mooc/edx-platform,zubair-arbi/edx-platform,playm2mboy/edx-platform,pomegranited/edx-platform,AkA84/edx-platform,inares/edx-platform,antoviaque/edx-platform,Ayub-Khan/edx-platform,rismalrv/edx-platform,franosincic/edx-platform,eduNEXT/edunext-platform,hastexo/edx-platform,jelugbo/tundex,DefyVentures/edx-platform,shurihell/testasia,OmarIthawi/edx-platform,tiagochiavericosta/edx-platform,kalebhartje/schoolboost,jazztpt/edx-platform,morpheby/levelup-by,zerobatu/edx-platform,marcore/edx-platform,motion2015/a3,jamiefolsom/edx-platform,abdoosh00/edraak,knehez/edx-platform,hastexo/edx-platform,abdoosh00/edraak,chauhanhardik/populo,atsolakid/edx-platform,abdoosh00/edx-rtl-final,ak2703/edx-platform,Shrhawk/edx-platform,arbrandes/edx-platform,solashirai/edx-platform,chauhanhardik/populo,sameetb-cuelogic/edx-platform-test,appliedx/edx-platform,LearnEra/LearnEraPlaftform,solashirai/edx-platform,romain-li/edx-platform,apigee/edx-platform,IONISx/edx-platform,angelapper/edx-platform,tanmaykm/edx-platform,B-MOOC/edx-platform,angelapper/edx-platform,zubair-arbi/edx-platform,4eek/edx-platform,amir-qayyum-khan/edx-platform,OmarIthawi/edx-platform,franosincic/edx-platform,benpatterson/edx-platform,jbassen/edx-platform,fly19890211/edx-platform,UXE/local-edx,jbassen/edx-platform,chand3040/cloud_that,RPI-OPENEDX/edx-platform,edry/edx-platform,jswope00/griffinx,kmoocdev/edx-platform,ovnicraft/edx-platform,IITBinterns13/edx-platform-dev,IONISx/edx-platform,ovnicraft/edx-platform,DefyVentures/edx-platform,itsjeyd/edx-platform,
hamzehd/edx-platform,mjg2203/edx-platform-seas,cognitiveclass/edx-platform,SravanthiSinha/edx-platform,4eek/edx-platform,motion2015/edx-platform,beni55/edx-platform,morpheby/levelup-by,wwj718/ANALYSE,martynovp/edx-platform,pdehaye/theming-edx-platform,praveen-pal/edx-platform,dsajkl/123,unicri/edx-platform,caesar2164/edx-platform,y12uc231/edx-platform,sameetb-cuelogic/edx-platform-test,CourseTalk/edx-platform,doganov/edx-platform,Softmotions/edx-platform,chauhanhardik/populo_2,chauhanhardik/populo_2,zadgroup/edx-platform,ubc/edx-platform,nanolearningllc/edx-platform-cypress-2,torchingloom/edx-platform,waheedahmed/edx-platform,defance/edx-platform,EDUlib/edx-platform,shubhdev/edxOnBaadal,y12uc231/edx-platform,cecep-edu/edx-platform,CredoReference/edx-platform,kursitet/edx-platform,IONISx/edx-platform,jonathan-beard/edx-platform,louyihua/edx-platform,bdero/edx-platform,pabloborrego93/edx-platform,edx/edx-platform,Ayub-Khan/edx-platform,iivic/BoiseStateX,olexiim/edx-platform,ampax/edx-platform-backup,fly19890211/edx-platform,doismellburning/edx-platform,proversity-org/edx-platform,cyanna/edx-platform,torchingloom/edx-platform,MakeHer/edx-platform,rationalAgent/edx-platform-custom,stvstnfrd/edx-platform,BehavioralInsightsTeam/edx-platform,chauhanhardik/populo_2,analyseuc3m/ANALYSE-v1,Edraak/circleci-edx-platform,jonathan-beard/edx-platform,PepperPD/edx-pepper-platform,IndonesiaX/edx-platform,SivilTaram/edx-platform,dsajkl/reqiop,B-MOOC/edx-platform,10clouds/edx-platform,cyanna/edx-platform,eduNEXT/edx-platform,cpennington/edx-platform,jswope00/griffinx,TsinghuaX/edx-platform,EduPepperPDTesting/pepper2013-testing,bitifirefly/edx-platform,arifsetiawan/edx-platform,jswope00/griffinx,AkA84/edx-platform,martynovp/edx-platform,zhenzhai/edx-platform,analyseuc3m/ANALYSE-v1,jonathan-beard/edx-platform,dkarakats/edx-platform,nanolearningllc/edx-platform-cypress-2,SivilTaram/edx-platform,arifsetiawan/edx-platform,jazztpt/edx-platform,chauhanhardik/populo,solashirai/edx-platform,II
TBinterns13/edx-platform-dev,jamesblunt/edx-platform,inares/edx-platform,mjg2203/edx-platform-seas,dkarakats/edx-platform,Semi-global/edx-platform,alu042/edx-platform,nttks/edx-platform,arbrandes/edx-platform,mtlchun/edx,JCBarahona/edX,TsinghuaX/edx-platform,jswope00/GAI,eestay/edx-platform,adoosii/edx-platform,shubhdev/edx-platform,ZLLab-Mooc/edx-platform,CredoReference/edx-platform,syjeon/new_edx,jelugbo/tundex,cyanna/edx-platform,doismellburning/edx-platform,WatanabeYasumasa/edx-platform,hkawasaki/kawasaki-aio8-0,shashank971/edx-platform,PepperPD/edx-pepper-platform,dsajkl/123,prarthitm/edxplatform,Kalyzee/edx-platform,kmoocdev/edx-platform,halvertoluke/edx-platform,zerobatu/edx-platform,kmoocdev/edx-platform,teltek/edx-platform,peterm-itr/edx-platform,morenopc/edx-platform,beni55/edx-platform,jjmiranda/edx-platform,jjmiranda/edx-platform,stvstnfrd/edx-platform,jazkarta/edx-platform,wwj718/ANALYSE,cecep-edu/edx-platform,longmen21/edx-platform,chudaol/edx-platform,jazkarta/edx-platform-for-isc,dcosentino/edx-platform,benpatterson/edx-platform,msegado/edx-platform,rue89-tech/edx-platform,pomegranited/edx-platform,procangroup/edx-platform,syjeon/new_edx,zofuthan/edx-platform,dkarakats/edx-platform,kxliugang/edx-platform,DefyVentures/edx-platform,dcosentino/edx-platform,appliedx/edx-platform,BehavioralInsightsTeam/edx-platform,beacloudgenius/edx-platform,Livit/Livit.Learn.EdX,Unow/edx-platform,edx/edx-platform,utecuy/edx-platform,openfun/edx-platform,xuxiao19910803/edx,ahmedaljazzar/edx-platform,utecuy/edx-platform,kamalx/edx-platform,eemirtekin/edx-platform,torchingloom/edx-platform,EduPepperPD/pepper2013,openfun/edx-platform,ovnicraft/edx-platform,pomegranited/edx-platform,ferabra/edx-platform,edry/edx-platform,jamesblunt/edx-platform,ubc/edx-platform,DNFcode/edx-platform,mtlchun/edx,cognitiveclass/edx-platform,OmarIthawi/edx-platform,Endika/edx-platform,doganov/edx-platform,motion2015/a3,carsongee/edx-platform,appliedx/edx-platform,doismellburning/edx-platform,Ind
onesiaX/edx-platform,pomegranited/edx-platform,shashank971/edx-platform,cognitiveclass/edx-platform,xuxiao19910803/edx-platform,olexiim/edx-platform,utecuy/edx-platform,ahmedaljazzar/edx-platform,marcore/edx-platform,UXE/local-edx,jazkarta/edx-platform-for-isc,miptliot/edx-platform,nttks/edx-platform,naresh21/synergetics-edx-platform,gymnasium/edx-platform,hmcmooc/muddx-platform,teltek/edx-platform,vasyarv/edx-platform,dsajkl/123,jruiperezv/ANALYSE,ahmadio/edx-platform,jazkarta/edx-platform,doganov/edx-platform,solashirai/edx-platform,dcosentino/edx-platform,ampax/edx-platform-backup,wwj718/ANALYSE,jbzdak/edx-platform,TsinghuaX/edx-platform,bitifirefly/edx-platform,nikolas/edx-platform,atsolakid/edx-platform,Edraak/edx-platform,yokose-ks/edx-platform,JCBarahona/edX,motion2015/edx-platform,DNFcode/edx-platform,torchingloom/edx-platform,MSOpenTech/edx-platform,arbrandes/edx-platform,rhndg/openedx,Softmotions/edx-platform,LICEF/edx-platform,shurihell/testasia,devs1991/test_edx_docmode,edry/edx-platform,arifsetiawan/edx-platform,xuxiao19910803/edx,ubc/edx-platform,vasyarv/edx-platform,nanolearning/edx-platform | import json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
Put course location urls in heartbeat page, rather than lists | import json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location.url() for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
| <commit_before>import json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
<commit_msg>Put course location urls in heartbeat page, rather than lists<commit_after> | import json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location.url() for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
| import json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
Put course location urls in heartbeat page, rather than listsimport json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location.url() for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
| <commit_before>import json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
<commit_msg>Put course location urls in heartbeat page, rather than lists<commit_after>import json
from datetime import datetime
from django.http import HttpResponse
from xmodule.modulestore.django import modulestore
def heartbeat(request):
"""
Simple view that a loadbalancer can check to verify that the app is up
"""
output = {
'date': datetime.now().isoformat(),
'courses': [course.location.url() for course in modulestore().get_courses()],
}
return HttpResponse(json.dumps(output, indent=4))
|
7ba76f2f4dd861e4acd34536e3484fc8ef001b3f | IPython/nbconvert/exporters/python.py | IPython/nbconvert/exporters/python.py | """Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return 'py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
| """Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return '.py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
| Add the . into file_extension | Add the . into file_extension
| Python | bsd-3-clause | cornhundred/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,cornhundred/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,cornhundred/ipywidgets,ipython/ipywidgets,cornhundred/ipywidgets,cornhundred/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets | """Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return 'py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
Add the . into file_extension | """Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return '.py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
| <commit_before>"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return 'py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
<commit_msg>Add the . into file_extension<commit_after> | """Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return '.py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
| """Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return 'py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
Add the . into file_extension"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return '.py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
| <commit_before>"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return 'py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
<commit_msg>Add the . into file_extension<commit_after>"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return '.py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
|
2e1b5f4804023cd551b1d641e4b4dc5ba693ff62 | demos/simple.py | demos/simple.py | import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
if __name__ == '__main__':
sn = 0.2
ell = 0.670104947766
sf = 1.25415619045
model = pbm.Sinusoidal(0.2)
gp = pg.BasicGP(sn, ell, sf)
policy = pbp.GPUCB(gp, model.bounds)
xmin = model.bounds[0][0]
xmax = model.bounds[0][1]
X = np.linspace(xmin, xmax, 200)[:, None]
x = (xmax-xmin) / 2
for i in xrange(40):
pg.gpplot(policy.gp, xmin=xmin, xmax=xmax)
pl.plot(X, policy.get_index(X), lw=2)
pl.axvline(x, color='r')
pl.axis('tight')
pl.axis(xmin=xmin, xmax=xmax)
pl.draw()
y = model.get_data(x)
policy.add_data(x, y)
x = policy.get_next()
| import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
def run_model(Model, sn, ell, sf, T):
model = Model(0.2)
gp = pg.BasicGP(sn, ell, sf)
policy = pbp.GPUCB(gp, model.bounds)
xmin = model.bounds[0][0]
xmax = model.bounds[0][1]
X = np.linspace(xmin, xmax, 200)[:, None]
x = (xmax-xmin) / 2
for i in xrange(T):
pg.gpplot(policy.gp, xmin=xmin, xmax=xmax)
pl.plot(X, policy.get_index(X), lw=2)
pl.axvline(x, color='r')
pl.axis('tight')
pl.axis(xmin=xmin, xmax=xmax)
pl.draw()
y = model.get_data(x)
policy.add_data(x, y)
x = policy.get_next()
if __name__ == '__main__':
# run_model(pbm.Sinusoidal, 0.2, 0.70, 1.25, 100)
run_model(pbm.Gramacy, 0.2, 0.05, 1.25, 100)
| Add a harder test example. | Add a harder test example.
| Python | bsd-2-clause | mwhoffman/pybo,jhartford/pybo | import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
if __name__ == '__main__':
sn = 0.2
ell = 0.670104947766
sf = 1.25415619045
model = pbm.Sinusoidal(0.2)
gp = pg.BasicGP(sn, ell, sf)
policy = pbp.GPUCB(gp, model.bounds)
xmin = model.bounds[0][0]
xmax = model.bounds[0][1]
X = np.linspace(xmin, xmax, 200)[:, None]
x = (xmax-xmin) / 2
for i in xrange(40):
pg.gpplot(policy.gp, xmin=xmin, xmax=xmax)
pl.plot(X, policy.get_index(X), lw=2)
pl.axvline(x, color='r')
pl.axis('tight')
pl.axis(xmin=xmin, xmax=xmax)
pl.draw()
y = model.get_data(x)
policy.add_data(x, y)
x = policy.get_next()
Add a harder test example. | import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
def run_model(Model, sn, ell, sf, T):
model = Model(0.2)
gp = pg.BasicGP(sn, ell, sf)
policy = pbp.GPUCB(gp, model.bounds)
xmin = model.bounds[0][0]
xmax = model.bounds[0][1]
X = np.linspace(xmin, xmax, 200)[:, None]
x = (xmax-xmin) / 2
for i in xrange(T):
pg.gpplot(policy.gp, xmin=xmin, xmax=xmax)
pl.plot(X, policy.get_index(X), lw=2)
pl.axvline(x, color='r')
pl.axis('tight')
pl.axis(xmin=xmin, xmax=xmax)
pl.draw()
y = model.get_data(x)
policy.add_data(x, y)
x = policy.get_next()
if __name__ == '__main__':
# run_model(pbm.Sinusoidal, 0.2, 0.70, 1.25, 100)
run_model(pbm.Gramacy, 0.2, 0.05, 1.25, 100)
| <commit_before>import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
if __name__ == '__main__':
sn = 0.2
ell = 0.670104947766
sf = 1.25415619045
model = pbm.Sinusoidal(0.2)
gp = pg.BasicGP(sn, ell, sf)
policy = pbp.GPUCB(gp, model.bounds)
xmin = model.bounds[0][0]
xmax = model.bounds[0][1]
X = np.linspace(xmin, xmax, 200)[:, None]
x = (xmax-xmin) / 2
for i in xrange(40):
pg.gpplot(policy.gp, xmin=xmin, xmax=xmax)
pl.plot(X, policy.get_index(X), lw=2)
pl.axvline(x, color='r')
pl.axis('tight')
pl.axis(xmin=xmin, xmax=xmax)
pl.draw()
y = model.get_data(x)
policy.add_data(x, y)
x = policy.get_next()
<commit_msg>Add a harder test example.<commit_after> | import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
def run_model(Model, sn, ell, sf, T):
    """Run a GP-UCB Bayesian-optimization demo on `Model` for T steps.

    `sn`, `ell` and `sf` are the GP noise, lengthscale and signal
    parameters. Each iteration plots the GP posterior and the acquisition
    index, queries the model at the policy's chosen point, and feeds the
    observation back to the policy.
    """
    model = Model(0.2)
    gp = pg.BasicGP(sn, ell, sf)
    policy = pbp.GPUCB(gp, model.bounds)

    lo, hi = model.bounds[0][0], model.bounds[0][1]
    grid = np.linspace(lo, hi, 200)[:, None]
    # Initial query point (same expression as the original: half the range).
    query = (hi - lo) / 2
    for _ in xrange(T):
        pg.gpplot(policy.gp, xmin=lo, xmax=hi)
        pl.plot(grid, policy.get_index(grid), lw=2)
        pl.axvline(query, color='r')
        pl.axis('tight')
        pl.axis(xmin=lo, xmax=hi)
        pl.draw()
        obs = model.get_data(query)
        policy.add_data(query, obs)
        query = policy.get_next()
if __name__ == '__main__':
    # Run the harder Gramacy test model; the easier sinusoidal case is
    # kept commented out for reference.
    # run_model(pbm.Sinusoidal, 0.2, 0.70, 1.25, 100)
    run_model(pbm.Gramacy, 0.2, 0.05, 1.25, 100)
| import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
if __name__ == '__main__':
sn = 0.2
ell = 0.670104947766
sf = 1.25415619045
model = pbm.Sinusoidal(0.2)
gp = pg.BasicGP(sn, ell, sf)
policy = pbp.GPUCB(gp, model.bounds)
xmin = model.bounds[0][0]
xmax = model.bounds[0][1]
X = np.linspace(xmin, xmax, 200)[:, None]
x = (xmax-xmin) / 2
for i in xrange(40):
pg.gpplot(policy.gp, xmin=xmin, xmax=xmax)
pl.plot(X, policy.get_index(X), lw=2)
pl.axvline(x, color='r')
pl.axis('tight')
pl.axis(xmin=xmin, xmax=xmax)
pl.draw()
y = model.get_data(x)
policy.add_data(x, y)
x = policy.get_next()
Add a harder test example.import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
def run_model(Model, sn, ell, sf, T):
model = Model(0.2)
gp = pg.BasicGP(sn, ell, sf)
policy = pbp.GPUCB(gp, model.bounds)
xmin = model.bounds[0][0]
xmax = model.bounds[0][1]
X = np.linspace(xmin, xmax, 200)[:, None]
x = (xmax-xmin) / 2
for i in xrange(T):
pg.gpplot(policy.gp, xmin=xmin, xmax=xmax)
pl.plot(X, policy.get_index(X), lw=2)
pl.axvline(x, color='r')
pl.axis('tight')
pl.axis(xmin=xmin, xmax=xmax)
pl.draw()
y = model.get_data(x)
policy.add_data(x, y)
x = policy.get_next()
if __name__ == '__main__':
# run_model(pbm.Sinusoidal, 0.2, 0.70, 1.25, 100)
run_model(pbm.Gramacy, 0.2, 0.05, 1.25, 100)
| <commit_before>import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
if __name__ == '__main__':
sn = 0.2
ell = 0.670104947766
sf = 1.25415619045
model = pbm.Sinusoidal(0.2)
gp = pg.BasicGP(sn, ell, sf)
policy = pbp.GPUCB(gp, model.bounds)
xmin = model.bounds[0][0]
xmax = model.bounds[0][1]
X = np.linspace(xmin, xmax, 200)[:, None]
x = (xmax-xmin) / 2
for i in xrange(40):
pg.gpplot(policy.gp, xmin=xmin, xmax=xmax)
pl.plot(X, policy.get_index(X), lw=2)
pl.axvline(x, color='r')
pl.axis('tight')
pl.axis(xmin=xmin, xmax=xmax)
pl.draw()
y = model.get_data(x)
policy.add_data(x, y)
x = policy.get_next()
<commit_msg>Add a harder test example.<commit_after>import numpy as np
import matplotlib.pyplot as pl
import pygp as pg
import pybo.models as pbm
import pybo.policies as pbp
def run_model(Model, sn, ell, sf, T):
model = Model(0.2)
gp = pg.BasicGP(sn, ell, sf)
policy = pbp.GPUCB(gp, model.bounds)
xmin = model.bounds[0][0]
xmax = model.bounds[0][1]
X = np.linspace(xmin, xmax, 200)[:, None]
x = (xmax-xmin) / 2
for i in xrange(T):
pg.gpplot(policy.gp, xmin=xmin, xmax=xmax)
pl.plot(X, policy.get_index(X), lw=2)
pl.axvline(x, color='r')
pl.axis('tight')
pl.axis(xmin=xmin, xmax=xmax)
pl.draw()
y = model.get_data(x)
policy.add_data(x, y)
x = policy.get_next()
if __name__ == '__main__':
# run_model(pbm.Sinusoidal, 0.2, 0.70, 1.25, 100)
run_model(pbm.Gramacy, 0.2, 0.05, 1.25, 100)
|
d008a08d0c79610eba715842c2f437bf89f8787c | puffin/gui/form.py | puffin/gui/form.py | from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
| from flask_wtf import Form
from flask_security.core import current_user
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
| Allow changing domain to own | Allow changing domain to own
| Python | agpl-3.0 | loomchild/jenca-puffin,loomchild/puffin,puffinrocks/puffin,loomchild/puffin,puffinrocks/puffin,loomchild/puffin,loomchild/puffin,loomchild/puffin,loomchild/jenca-puffin | from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
Allow changing domain to own | from flask_wtf import Form
from flask_security.core import current_user
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
| <commit_before>from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
<commit_msg>Allow changing domain to own<commit_after> | from flask_wtf import Form
from flask_security.core import current_user
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
    """Minimal control form for an application: start and stop buttons.

    The view distinguishes which submit button was pressed.
    """
    start = SubmitField('Start')
    stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
    """Settings form for an application; currently only the custom domain."""
    domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
    submit = SubmitField('Update')
    def validate(self):
        """Run the standard field validation, then apply the domain rule.

        A domain that ends with the server name is rejected unless it is
        under the current user's own subdomain
        (``<login>.<server_name>``). Anything is allowed when the server
        name is "localhost".
        """
        rv = Form.validate(self)
        if not rv:
            return False
        if self.domain.data:
            server_name = app.config["SERVER_NAME_FULL"]
            # Reject e.g. "other.<server_name>" but accept
            # "<login>.<server_name>" and unrelated external domains.
            if (server_name != "localhost"
                    and not self.domain.data.endswith(current_user.login + "." + server_name)
                    and self.domain.data.endswith(server_name)):
                self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
                return False
        return True
class ProfileForm(Form):
    """Profile editing form for the current user.

    Login and email are displayed but the view decides whether they are
    editable; only the display name is validated here.
    """
    login = StringField('Login')
    email = StringField('Email')
    # Fix: the error message promises that dots are allowed, but the
    # original pattern ^[A-Za-z0-9_\- ]+$ did not include '.', so names
    # like "J. Doe" were rejected with a misleading message.
    name = StringField('Name', validators=[Required(), Length(1, 64),
        Regexp(r'^[A-Za-z0-9_\-. ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
    submit = SubmitField('Update')
| from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
Allow changing domain to ownfrom flask_wtf import Form
from flask_security.core import current_user
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
| <commit_before>from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
<commit_msg>Allow changing domain to own<commit_after>from flask_wtf import Form
from flask_security.core import current_user
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
|
5944ada060154cde31c1fe04adeb1fb10a718eaf | urllibRequests.py | urllibRequests.py | import urllib.request
import urllib.parse
def get(urlStr,params={}):
if params == {}:
req = urllib.request.urlopen(urlStr)
else:
reqdata = urllib.request.Request(urlStr,urllib.parse.urlencode(params).encode('ascii'))
req = urllib.request.urlopen(reqdata)
return req.read()
| import urllib.request
import urllib.parse
def get(urlStr,params={}):
reqdata = urllib.request.Request(urlStr)
reqdata.add_header('User-Agent',
'VocabTool/0.2 (https://github.com/RihanWu/vocabtool)')
if params != {}:
reqdata.data = urllib.parse.urlencode(params).encode('ascii')
req = urllib.request.urlopen(reqdata)
return req.read()
| Add User-Agent for better netiquette | Add User-Agent for better netiquette
| Python | mit | RihanWu/vocabtool | import urllib.request
import urllib.parse
def get(urlStr,params={}):
if params == {}:
req = urllib.request.urlopen(urlStr)
else:
reqdata = urllib.request.Request(urlStr,urllib.parse.urlencode(params).encode('ascii'))
req = urllib.request.urlopen(reqdata)
return req.read()
Add User-Agent for better netiquette | import urllib.request
import urllib.parse
def get(urlStr,params={}):
reqdata = urllib.request.Request(urlStr)
reqdata.add_header('User-Agent',
'VocabTool/0.2 (https://github.com/RihanWu/vocabtool)')
if params != {}:
reqdata.data = urllib.parse.urlencode(params).encode('ascii')
req = urllib.request.urlopen(reqdata)
return req.read()
| <commit_before>import urllib.request
import urllib.parse
def get(urlStr,params={}):
if params == {}:
req = urllib.request.urlopen(urlStr)
else:
reqdata = urllib.request.Request(urlStr,urllib.parse.urlencode(params).encode('ascii'))
req = urllib.request.urlopen(reqdata)
return req.read()
<commit_msg>Add User-Agent for better netiquette<commit_after> | import urllib.request
import urllib.parse
def get(urlStr,params={}):
reqdata = urllib.request.Request(urlStr)
reqdata.add_header('User-Agent',
'VocabTool/0.2 (https://github.com/RihanWu/vocabtool)')
if params != {}:
reqdata.data = urllib.parse.urlencode(params).encode('ascii')
req = urllib.request.urlopen(reqdata)
return req.read()
| import urllib.request
import urllib.parse
def get(urlStr,params={}):
if params == {}:
req = urllib.request.urlopen(urlStr)
else:
reqdata = urllib.request.Request(urlStr,urllib.parse.urlencode(params).encode('ascii'))
req = urllib.request.urlopen(reqdata)
return req.read()
Add User-Agent for better netiquetteimport urllib.request
import urllib.parse
def get(urlStr,params={}):
reqdata = urllib.request.Request(urlStr)
reqdata.add_header('User-Agent',
'VocabTool/0.2 (https://github.com/RihanWu/vocabtool)')
if params != {}:
reqdata.data = urllib.parse.urlencode(params).encode('ascii')
req = urllib.request.urlopen(reqdata)
return req.read()
| <commit_before>import urllib.request
import urllib.parse
def get(urlStr,params={}):
if params == {}:
req = urllib.request.urlopen(urlStr)
else:
reqdata = urllib.request.Request(urlStr,urllib.parse.urlencode(params).encode('ascii'))
req = urllib.request.urlopen(reqdata)
return req.read()
<commit_msg>Add User-Agent for better netiquette<commit_after>import urllib.request
import urllib.parse
def get(urlStr,params={}):
reqdata = urllib.request.Request(urlStr)
reqdata.add_header('User-Agent',
'VocabTool/0.2 (https://github.com/RihanWu/vocabtool)')
if params != {}:
reqdata.data = urllib.parse.urlencode(params).encode('ascii')
req = urllib.request.urlopen(reqdata)
return req.read()
|
c72120b4b3b7131450f1ee34c9b373f61a80c581 | utils/database.py | utils/database.py | import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [{"time":__import__("time").time(), messsage:""}]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
| import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [{"time":__import__("time").time(), "messsage":""}]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
| Fix NameError: global name 'messsage' is not defined | Fix NameError: global name 'messsage' is not defined
| Python | mit | wolfy1339/Python-IRC-Bot | import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [{"time":__import__("time").time(), messsage:""}]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
Fix NameError: global name 'messsage' is not defined | import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [{"time":__import__("time").time(), "messsage":""}]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
| <commit_before>import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [{"time":__import__("time").time(), messsage:""}]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
<commit_msg>Fix NameError: global name 'messsage' is not defined<commit_after> | import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [{"time":__import__("time").time(), "messsage":""}]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
| import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [{"time":__import__("time").time(), messsage:""}]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
Fix NameError: global name 'messsage' is not definedimport json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [{"time":__import__("time").time(), "messsage":""}]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
| <commit_before>import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [{"time":__import__("time").time(), messsage:""}]
}
if nick in self[channel]:
del temp['seen']
self[channel][nick].update(temp)
else:
self[channel][nick] = temp
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
<commit_msg>Fix NameError: global name 'messsage' is not defined<commit_after>import json
class Database(dict):
    """Per-channel user database, persisted as JSON in ``userdb.json``.

    Maps channel -> nick -> info dict with keys 'hostmask', 'host',
    'account' and 'seen'. The ``irc`` connection is kept so that cache
    misses can trigger a WHO query.
    """
    def __init__(self, irc):
        # Load the persisted database from disk.
        super(Database, self).__init__(json.load(open("userdb.json")))
        self.irc = irc
    def remove_entry(self, event, nick):
        """Remove `nick` from the event's channel.

        Falls back to matching by host when the nick is unknown (e.g. the
        user changed nick before leaving).
        """
        try:
            del self[event.target][nick]
        except KeyError:
            for info in self[event.target].values():
                if info['host'] == event.source.host:
                    del self[event.target][info['hostmask'].split("!")[0]]
                    break
    def add_entry(self, channel, nick, hostmask, account):
        """Insert or update the record for `nick` in `channel`.

        On update the existing 'seen' history is preserved.
        """
        # Fix: use a real import instead of the __import__("time") hack.
        import time
        entry = {
            'hostmask': hostmask,
            'host': hostmask.split("@")[1],
            'account': account,
            # NOTE(review): 'messsage' key kept as-is (apparent typo)
            # because other code may read this stored schema — confirm
            # before renaming.
            'seen': [{"time": time.time(), "messsage": ""}]
        }
        if nick in self[channel]:
            del entry['seen']
            self[channel][nick].update(entry)
        else:
            self[channel][nick] = entry
    def get_user_host(self, channel, nick):
        """Return a '*!*@host' ban-style mask for `nick`.

        On a cache miss a WHO query is sent before retrying.
        NOTE(review): the WHO reply is asynchronous, so the retry may
        still raise KeyError — confirm callers handle that.
        """
        try:
            host = "*!*@" + self[channel][nick]['host']
        except KeyError:
            self.irc.send("WHO {0} nuhs%nhuac".format(channel))
            host = "*!*@" + self[channel][nick]['host']
        return host
    def flush(self):
        """Write the database back to ``userdb.json``, pretty-printed."""
        with open('userdb.json', 'w') as f:
            json.dump(self, f, indent=2, separators=(',', ': '))
            f.write("\n")
|
cde59e7f8f8ea74c720f107c80c933b5b9aa913e | recipy/__init__.py | recipy/__init__.py | # These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.2.3'
# Patch built-in open function
# orig_open = __builtins__['open']
# def patched_open(*args, **kwargs):
# print('Called open!')
# print(args)
# print(kwargs)
# return(orig_open(*args, **kwargs))
# __builtins__['open'] = patched_open
log_init()
| # These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.3'
log_init()
| Remove old commented out code, and update version to 0.3 | Remove old commented out code, and update version to 0.3
| Python | apache-2.0 | recipy/recipy,recipy/recipy | # These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.2.3'
# Patch built-in open function
# orig_open = __builtins__['open']
# def patched_open(*args, **kwargs):
# print('Called open!')
# print(args)
# print(kwargs)
# return(orig_open(*args, **kwargs))
# __builtins__['open'] = patched_open
log_init()
Remove old commented out code, and update version to 0.3 | # These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.3'
log_init()
| <commit_before># These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.2.3'
# Patch built-in open function
# orig_open = __builtins__['open']
# def patched_open(*args, **kwargs):
# print('Called open!')
# print(args)
# print(kwargs)
# return(orig_open(*args, **kwargs))
# __builtins__['open'] = patched_open
log_init()
<commit_msg>Remove old commented out code, and update version to 0.3<commit_after> | # These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.3'
log_init()
| # These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.2.3'
# Patch built-in open function
# orig_open = __builtins__['open']
# def patched_open(*args, **kwargs):
# print('Called open!')
# print(args)
# print(kwargs)
# return(orig_open(*args, **kwargs))
# __builtins__['open'] = patched_open
log_init()
Remove old commented out code, and update version to 0.3# These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.3'
log_init()
| <commit_before># These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.2.3'
# Patch built-in open function
# orig_open = __builtins__['open']
# def patched_open(*args, **kwargs):
# print('Called open!')
# print(args)
# print(kwargs)
# return(orig_open(*args, **kwargs))
# __builtins__['open'] = patched_open
log_init()
<commit_msg>Remove old commented out code, and update version to 0.3<commit_after># These lines ARE needed, as they actually set up sys.meta_path
from . import PatchWarnings
from . import PatchBaseScientific
from . import PatchScientific
from .log import *
from .utils import open
__version__ = '0.3'
log_init()
|
f7dd603d4e24134affda6430736838ecaaab9938 | jungle/cli.py | jungle/cli.py | # -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
cli = JungleCLI(help="aws operation cli (v{})".format(__version__))
if __name__ == '__main__':
cli()
| # -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
try:
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
except ImportError:
pass
cli = JungleCLI(help="aws operation cli (v{0})".format(__version__))
if __name__ == '__main__':
cli()
| Fix unintended ImportError for wrong subcommnad | Fix unintended ImportError for wrong subcommnad
| Python | mit | achiku/jungle | # -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
cli = JungleCLI(help="aws operation cli (v{})".format(__version__))
if __name__ == '__main__':
cli()
Fix unintended ImportError for wrong subcommnad | # -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
try:
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
except ImportError:
pass
cli = JungleCLI(help="aws operation cli (v{0})".format(__version__))
if __name__ == '__main__':
cli()
| <commit_before># -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
cli = JungleCLI(help="aws operation cli (v{})".format(__version__))
if __name__ == '__main__':
cli()
<commit_msg>Fix unintended ImportError for wrong subcommnad<commit_after> | # -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
try:
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
except ImportError:
pass
cli = JungleCLI(help="aws operation cli (v{0})".format(__version__))
if __name__ == '__main__':
cli()
| # -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
cli = JungleCLI(help="aws operation cli (v{})".format(__version__))
if __name__ == '__main__':
cli()
Fix unintended ImportError for wrong subcommnad# -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
try:
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
except ImportError:
pass
cli = JungleCLI(help="aws operation cli (v{0})".format(__version__))
if __name__ == '__main__':
cli()
| <commit_before># -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
cli = JungleCLI(help="aws operation cli (v{})".format(__version__))
if __name__ == '__main__':
cli()
<commit_msg>Fix unintended ImportError for wrong subcommnad<commit_after># -*- coding: utf-8 -*-
import click
from . import __version__
class JungleCLI(click.MultiCommand):
"""Jangle CLI main class"""
def list_commands(self, ctx):
"""return available modules"""
return ['ec2', 'elb', 'emr', 'asg']
def get_command(self, ctx, name):
"""get command"""
try:
mod = __import__('jungle.' + name, None, None, ['cli'])
return mod.cli
except ImportError:
pass
cli = JungleCLI(help="aws operation cli (v{0})".format(__version__))
if __name__ == '__main__':
cli()
|
d0211871e11d2a2ce9f9a961750aa12a28658c62 | vroom/graphics.py | vroom/graphics.py | import pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
pygame.draw.rect(self.surface, self.car_color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
| import pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
acceleration_rate = car.acceleration_rate
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
# Change car color depending on acceleration
if acceleration_rate > 0:
rate = min(1, acceleration_rate)
color = (50, 50, int(rate * 255))
else:
rate = max(-1, acceleration_rate)
color = (int(-rate * 255), 50, 50)
pygame.draw.rect(self.surface, color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
| Change car color depending on acceleration rate | Change car color depending on acceleration rate
| Python | mit | thibault/vroom | import pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
pygame.draw.rect(self.surface, self.car_color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
Change car color depending on acceleration rate | import pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
acceleration_rate = car.acceleration_rate
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
# Change car color depending on acceleration
if acceleration_rate > 0:
rate = min(1, acceleration_rate)
color = (50, 50, int(rate * 255))
else:
rate = max(-1, acceleration_rate)
color = (int(-rate * 255), 50, 50)
pygame.draw.rect(self.surface, color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
| <commit_before>import pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
pygame.draw.rect(self.surface, self.car_color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
<commit_msg>Change car color depending on acceleration rate<commit_after> | import pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
acceleration_rate = car.acceleration_rate
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
# Change car color depending on acceleration
if acceleration_rate > 0:
rate = min(1, acceleration_rate)
color = (50, 50, int(rate * 255))
else:
rate = max(-1, acceleration_rate)
color = (int(-rate * 255), 50, 50)
pygame.draw.rect(self.surface, color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
| import pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
pygame.draw.rect(self.surface, self.car_color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
Change car color depending on acceleration rateimport pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
acceleration_rate = car.acceleration_rate
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
# Change car color depending on acceleration
if acceleration_rate > 0:
rate = min(1, acceleration_rate)
color = (50, 50, int(rate * 255))
else:
rate = max(-1, acceleration_rate)
color = (int(-rate * 255), 50, 50)
pygame.draw.rect(self.surface, color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
| <commit_before>import pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
pygame.draw.rect(self.surface, self.car_color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
<commit_msg>Change car color depending on acceleration rate<commit_after>import pygame
class Graphic:
car_color = (255, 50, 50)
car_width = 3
road_color = (255, 255, 255)
road_width = 6
draw_methods = {
'Car': 'draw_car',
'Road': 'draw_road',
}
def __init__(self, surface):
self.surface = surface
def draw(self, obj):
object_class = obj.__class__.__name__
method_name = self.draw_methods.get(object_class, None)
if method_name:
method = getattr(self, method_name)
method(obj)
def draw_car(self, car):
coord = car.coordinates
acceleration_rate = car.acceleration_rate
rect = pygame.Rect(coord.x, coord.y,
self.car_width, self.car_width)
# Change car color depending on acceleration
if acceleration_rate > 0:
rate = min(1, acceleration_rate)
color = (50, 50, int(rate * 255))
else:
rate = max(-1, acceleration_rate)
color = (int(-rate * 255), 50, 50)
pygame.draw.rect(self.surface, color, rect, 0)
def draw_road(self, road):
pointlist = road.pointlist()
closed = False
pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
self.road_width)
|
bddf5358b92d58549496de41ffeea724aeb2feb7 | openmm/run_test.py | openmm/run_test.py | #!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.1.1', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == 'c1a64aaa3b4b71f8dd9648fa724d2548a99d4ced', "openmm.version.git_revision = %s" % openmm.version.git_revision
| #!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == '07c1b86c905870afac97bd54dd776433c1b602c2', "openmm.version.git_revision = %s" % openmm.version.git_revision
| Update test for 7.2 beta build | [openmm] Update test for 7.2 beta build
| Python | mit | peastman/conda-recipes,omnia-md/conda-recipes,peastman/conda-recipes,omnia-md/conda-recipes,omnia-md/conda-recipes,peastman/conda-recipes | #!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.1.1', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == 'c1a64aaa3b4b71f8dd9648fa724d2548a99d4ced', "openmm.version.git_revision = %s" % openmm.version.git_revision
[openmm] Update test for 7.2 beta build | #!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == '07c1b86c905870afac97bd54dd776433c1b602c2', "openmm.version.git_revision = %s" % openmm.version.git_revision
| <commit_before>#!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.1.1', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == 'c1a64aaa3b4b71f8dd9648fa724d2548a99d4ced', "openmm.version.git_revision = %s" % openmm.version.git_revision
<commit_msg>[openmm] Update test for 7.2 beta build<commit_after> | #!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == '07c1b86c905870afac97bd54dd776433c1b602c2', "openmm.version.git_revision = %s" % openmm.version.git_revision
| #!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.1.1', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == 'c1a64aaa3b4b71f8dd9648fa724d2548a99d4ced', "openmm.version.git_revision = %s" % openmm.version.git_revision
[openmm] Update test for 7.2 beta build#!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == '07c1b86c905870afac97bd54dd776433c1b602c2', "openmm.version.git_revision = %s" % openmm.version.git_revision
| <commit_before>#!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.1.1', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == 'c1a64aaa3b4b71f8dd9648fa724d2548a99d4ced', "openmm.version.git_revision = %s" % openmm.version.git_revision
<commit_msg>[openmm] Update test for 7.2 beta build<commit_after>#!/usr/bin/env python
from simtk import openmm
# Check major version number
# If Z=0 for version X.Y.Z, out put is "X.Y"
assert openmm.Platform.getOpenMMVersion() == '7.2', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == '07c1b86c905870afac97bd54dd776433c1b602c2', "openmm.version.git_revision = %s" % openmm.version.git_revision
|
9c184878af2e7dd45bc7ff653979d008a77b0e30 | SimPEG/regularization/__init__.py | SimPEG/regularization/__init__.py | from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .tikhonov import Tikhonov
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
| from ..utils.code_utils import deprecate_class
from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmall(Small):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmoothDeriv(SmoothDeriv):
def __init__(self, mesh=None, **kwargs):
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class Simple(LeastSquaresRegularization):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.x.0", future_warn=True)
class Tikhonov(LeastSquaresRegularization):
pass
| Move deprecate reg classed to init | Move deprecate reg classed to init
| Python | mit | simpeg/simpeg | from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .tikhonov import Tikhonov
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
Move deprecate reg classed to init | from ..utils.code_utils import deprecate_class
from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmall(Small):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmoothDeriv(SmoothDeriv):
def __init__(self, mesh=None, **kwargs):
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class Simple(LeastSquaresRegularization):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.x.0", future_warn=True)
class Tikhonov(LeastSquaresRegularization):
pass
| <commit_before>from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .tikhonov import Tikhonov
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
<commit_msg>Move deprecate reg classed to init<commit_after> | from ..utils.code_utils import deprecate_class
from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmall(Small):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmoothDeriv(SmoothDeriv):
def __init__(self, mesh=None, **kwargs):
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class Simple(LeastSquaresRegularization):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.x.0", future_warn=True)
class Tikhonov(LeastSquaresRegularization):
pass
| from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .tikhonov import Tikhonov
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
Move deprecate reg classed to initfrom ..utils.code_utils import deprecate_class
from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmall(Small):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmoothDeriv(SmoothDeriv):
def __init__(self, mesh=None, **kwargs):
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class Simple(LeastSquaresRegularization):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.x.0", future_warn=True)
class Tikhonov(LeastSquaresRegularization):
pass
| <commit_before>from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .tikhonov import Tikhonov
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
<commit_msg>Move deprecate reg classed to init<commit_after>from ..utils.code_utils import deprecate_class
from .base import (
BaseRegularization,
LeastSquaresRegularization,
BaseSimilarityMeasure,
Small,
SmoothDeriv,
SmoothDeriv2,
)
from .regularization_mesh import RegularizationMesh
from .sparse import SparseSmall, SparseDeriv, Sparse
from .pgi import (
PGIsmallness,
PGI,
PGIwithNonlinearRelationshipsSmallness,
PGIwithRelationships,
)
from .cross_gradient import CrossGradient
from .correspondence import LinearCorrespondence
from .jtv import JointTotalVariation
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmall(Small):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class SimpleSmoothDeriv(SmoothDeriv):
def __init__(self, mesh=None, **kwargs):
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.16.0", future_warn=True)
class Simple(LeastSquaresRegularization):
def __init__(self, mesh=None, **kwargs):
if "alpha_s" not in kwargs:
kwargs["alpha_s"] = 1.0
super().__init__(mesh=mesh, normalized_gradients=True, **kwargs)
@deprecate_class(removal_version="0.x.0", future_warn=True)
class Tikhonov(LeastSquaresRegularization):
pass
|
f9b6dbc958251050c6170587d5a205350252329c | samples/ucrmain.py | samples/ucrmain.py | """
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
| """
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
import numpy as np
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
| Fix missing numpy import in ucr test | Fix missing numpy import in ucr test
| Python | bsd-2-clause | seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core | """
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
Fix missing numpy import in ucr test | """
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
import numpy as np
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
| <commit_before>"""
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
<commit_msg>Fix missing numpy import in ucr test<commit_after> | """
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
import numpy as np
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
| """
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
Fix missing numpy import in ucr test"""
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
import numpy as np
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
| <commit_before>"""
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
<commit_msg>Fix missing numpy import in ucr test<commit_after>"""
Script for performing queries on large time series by using UCR ED and DTW algs.
"""
from time import time
import blaze
from blaze.ts.ucr_dtw import ucr
import numpy as np
# Convert txt file into Blaze native format
def convert(filetxt, storage):
import os.path
if not os.path.exists(storage):
blaze.Array(np.loadtxt(filetxt),
params=blaze.params(storage=storage))
# Make sure that data is converted into a persistent Blaze array
convert("Data.txt", "Data")
convert("Query.txt", "Query")
convert("Query2.txt", "Query2")
t0 = time()
# Open Blaze arrays on-disk (will not be loaded in memory)
data = blaze.open("Data")
query = blaze.open("Query")
query2 = blaze.open("Query2")
print "Total Blaze arrays open time :", round(time()-t0, 4)
t0 = time()
# Do different searches using ED/DTW with native Blaze arrays
#loc, dist = ucr.ed(data, query, 128)
loc, dist = ucr.dtw(data, query, 0.1, 128, verbose=False)
#loc, dist = ucr.dtw(data, query2, 0.1, 128)
print "Location : ", loc
print "Distance : ", dist
print "Data Scanned : ", data.size
print "Total Execution Time :", round(time()-t0, 4)
|
29d24fb5e484e552b12778268d249b62390408be | webserver/urls.py | webserver/urls.py | from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
| from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,'show_indexes':True}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
| Fix that makes the media uploads work correctly. | Fix that makes the media uploads work correctly.
| Python | bsd-3-clause | siggame/webserver,siggame/webserver,siggame/webserver | from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
Fix that makes the media uploads work correctly. | from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,'show_indexes':True}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
| <commit_before>from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
<commit_msg>Fix that makes the media uploads work correctly.<commit_after> | from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,'show_indexes':True}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
| from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
Fix that makes the media uploads work correctly.from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,'show_indexes':True}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
| <commit_before>from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
<commit_msg>Fix that makes the media uploads work correctly.<commit_after>from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
# Webserver urls
url(r'^', include('webserver.home.urls')),
url(r'^', include('webserver.profiles.urls')),
url(r'^', include('webserver.codemanagement.urls')),
# Competition
url(r'^', include('competition.urls')),
# Django AllAuth
url(r'^accounts/', include('allauth.urls')),
# Zinnia Blog
url(r'^weblog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin_tools/', include('admin_tools.urls')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT,'show_indexes':True}),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
)
# Flat pages
urlpatterns += patterns(
'django.contrib.flatpages.views',
url(r'^(?P<url>.*)$', 'flatpage'),
)
|
d00aea75e0f4e6ba74a2ccf57d02a0ef912d17ac | db/TableConfig.py | db/TableConfig.py | {
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.1'"}
]
}
| {
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.2'"}
]
}
| Update DB to v4.2 on note pdf support | Update DB to v4.2 on note pdf support
| Python | mit | eddiedb6/ej,eddiedb6/ej,eddiedb6/ej | {
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.1'"}
]
}
Update DB to v4.2 on note pdf support | {
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.2'"}
]
}
| <commit_before>{
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.1'"}
]
}
<commit_msg>Update DB to v4.2 on note pdf support<commit_after> | {
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.2'"}
]
}
| {
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.1'"}
]
}
Update DB to v4.2 on note pdf support{
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.2'"}
]
}
| <commit_before>{
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.1'"}
]
}
<commit_msg>Update DB to v4.2 on note pdf support<commit_after>{
PDBConst.Name: "config",
PDBConst.Columns: [
{
PDBConst.Name: "Name",
PDBConst.Attributes: ["varchar(128)", "not null", "primary key"]
},
{
PDBConst.Name: "Value",
PDBConst.Attributes: ["varchar(128)"]
}],
PDBConst.Initials: [
{"Name": "'version'", "Value": "'4.2'"}
]
}
|
bce79a9156f93fd3c9356579bcb9309a66f2bdf2 | pushbullet/filetype.py | pushbullet/filetype.py | def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return file_type.decode('utf-8')
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
| def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return maybe_decode(file_type)
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
# return str on python3. Don't want to unconditionally
# decode because that results in unicode on python2
def maybe_decode(s):
if str == bytes:
return s
else:
return s.decode('utf-8')
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
| Return str rather than bytes for the description strings on python3. | Return str rather than bytes for the description strings on python3.
| Python | mit | randomchars/pushbullet.py,kovacsbalu/pushbullet.py,Saturn/pushbullet.py | def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return file_type.decode('utf-8')
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
Return str rather than bytes for the description strings on python3. | def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return maybe_decode(file_type)
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
# return str on python3. Don't want to unconditionally
# decode because that results in unicode on python2
def maybe_decode(s):
if str == bytes:
return s
else:
return s.decode('utf-8')
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
| <commit_before>def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return file_type.decode('utf-8')
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
<commit_msg>Return str rather than bytes for the description strings on python3.<commit_after> | def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return maybe_decode(file_type)
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
# return str on python3. Don't want to unconditionally
# decode because that results in unicode on python2
def maybe_decode(s):
if str == bytes:
return s
else:
return s.decode('utf-8')
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
| def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return file_type.decode('utf-8')
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
Return str rather than bytes for the description strings on python3.def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return maybe_decode(file_type)
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
# return str on python3. Don't want to unconditionally
# decode because that results in unicode on python2
def maybe_decode(s):
if str == bytes:
return s
else:
return s.decode('utf-8')
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
| <commit_before>def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return file_type.decode('utf-8')
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
<commit_msg>Return str rather than bytes for the description strings on python3.<commit_after>def _magic_get_file_type(f, _):
file_type = magic.from_buffer(f.read(1024), mime=True)
f.seek(0)
return maybe_decode(file_type)
def _guess_file_type(_, filename):
return mimetypes.guess_type(filename)[0]
# return str on python3. Don't want to unconditionally
# decode because that results in unicode on python2
def maybe_decode(s):
if str == bytes:
return s
else:
return s.decode('utf-8')
try:
import magic
except ImportError:
import mimetypes
get_file_type = _guess_file_type
else:
get_file_type = _magic_get_file_type
|
2a57e5c17115e9c89936e6667985af1a47bf3247 | raiden/utils/typing.py | raiden/utils/typing.py | # -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = T_Address
ChannelID = NewType('ChannelID', Address)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
| # -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = bytes
ChannelID = NewType('ChannelID', bytes)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
| Fix an oversight in new type definitions | Fix an oversight in new type definitions
| Python | mit | hackaugusto/raiden,hackaugusto/raiden | # -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = T_Address
ChannelID = NewType('ChannelID', Address)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
Fix an oversight in new type definitions | # -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = bytes
ChannelID = NewType('ChannelID', bytes)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
| <commit_before># -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = T_Address
ChannelID = NewType('ChannelID', Address)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
<commit_msg>Fix an oversight in new type definitions<commit_after> | # -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = bytes
ChannelID = NewType('ChannelID', bytes)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
| # -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = T_Address
ChannelID = NewType('ChannelID', Address)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
Fix an oversight in new type definitions# -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = bytes
ChannelID = NewType('ChannelID', bytes)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
| <commit_before># -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = T_Address
ChannelID = NewType('ChannelID', Address)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
<commit_msg>Fix an oversight in new type definitions<commit_after># -*- coding: utf-8 -*-
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import
from typing import NewType
T_Address = bytes
Address = NewType('Address', bytes)
T_BlockExpiration = int
BlockExpiration = NewType('BlockExpiration', int)
T_BlockNumber = int
BlockNumber = NewType('BlockNumber', int)
T_BlockTimeout = int
BlockTimeout = NewType('BlockNumber', int)
T_ChannelID = bytes
ChannelID = NewType('ChannelID', bytes)
T_Keccak256 = bytes
Keccak256 = NewType('Keccak256', bytes)
T_Secret = bytes
Secret = NewType('Secret', bytes)
T_Signature = bytes
Signature = NewType('Signature', bytes)
T_TokenAmount = int
TokenAmount = NewType('TokenAmount', int)
|
525a9fcb14a1f91aa94508ca6dcc362d430d2969 | __openerp__.py | __openerp__.py | {
'name': "Project Logical Framework",
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
| {
'name': "Project Logical Framework",
'author' : 'Stéphane Codazzi @ TeMPO-consulting',
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
| Add o2m between project and logical frameworks lines | Add o2m between project and logical frameworks lines
| Python | mit | stephane-/project_logical_framework | {
'name': "Project Logical Framework",
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
Add o2m between project and logical frameworks lines | {
'name': "Project Logical Framework",
'author' : 'Stéphane Codazzi @ TeMPO-consulting',
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
| <commit_before>{
'name': "Project Logical Framework",
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
<commit_msg>Add o2m between project and logical frameworks lines<commit_after> | {
'name': "Project Logical Framework",
'author' : 'Stéphane Codazzi @ TeMPO-consulting',
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
| {
'name': "Project Logical Framework",
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
Add o2m between project and logical frameworks lines{
'name': "Project Logical Framework",
'author' : 'Stéphane Codazzi @ TeMPO-consulting',
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
| <commit_before>{
'name': "Project Logical Framework",
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
<commit_msg>Add o2m between project and logical frameworks lines<commit_after>{
'name': "Project Logical Framework",
'author' : 'Stéphane Codazzi @ TeMPO-consulting',
'category': 'Project',
'sequence': 1,
'description': """
Project Logical Framework
=========================
""",
'version': '0.3',
'depends': ['project'],
'data': [
'static/src/xml/create_project.xml',
],
}
|
fc203d643aa9a69c835aebee0de9b17851ef7a58 | compose/cli/docker_client.py | compose/cli/docker_client.py | from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version='1.18', timeout=timeout)
| from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.18')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=timeout)
| Allow API version specification via env var | Allow API version specification via env var
Hard-coding the API version to '1.18' with the docker-py constructor will
cause the docker-py logic at
https://github.com/docker/docker-py/blob/master/docker/client.py#L143-L146
to always fail, which will cause authentication issues if you're using a
remote daemon using API version 1.19 - regardless of the API version of
the registry.
Allow the user to set the API version via an environment variable. If
the variable is not present, it will still default to '1.18' like it
does today.
Signed-off-by: Reilly Herrewig-Pope <feddfa1b1b567537c5d9b5bb606cac73059ea316@mandiant.com>
| Python | apache-2.0 | jeanpralo/compose,shubheksha/docker.github.io,saada/compose,talolard/compose,joaofnfernandes/docker.github.io,ionrock/compose,iamluc/compose,goloveychuk/compose,vdemeester/compose,menglingwei/denverdino.github.io,qzio/compose,d2bit/compose,docker/docker.github.io,GM-Alex/compose,bdwill/docker.github.io,anweiss/docker.github.io,KalleDK/compose,londoncalling/docker.github.io,jiekechoo/compose,londoncalling/docker.github.io,cgvarela/compose,cgvarela/compose,albers/compose,thaJeztah/compose,gdevillele/docker.github.io,bbirand/compose,au-phiware/compose,ph-One/compose,denverdino/docker.github.io,alexisbellido/docker.github.io,moxiegirl/compose,twitherspoon/compose,mchasal/compose,denverdino/docker.github.io,unodba/compose,vlajos/compose,lmesz/compose,mrfuxi/compose,troy0820/docker.github.io,mdaue/compose,LuisBosquez/docker.github.io,troy0820/docker.github.io,artemkaint/compose,gtrdotmcs/compose,joaofnfernandes/docker.github.io,denverdino/docker.github.io,phiroict/docker,jzwlqx/denverdino.github.io,simonista/compose,jrabbit/compose,docker/docker.github.io,rgbkrk/compose,ionrock/compose,TheDataShed/compose,jorgeLuizChaves/compose,menglingwei/denverdino.github.io,dbdd4us/compose,tiry/compose,shin-/docker.github.io,denverdino/denverdino.github.io,charleswhchan/compose,hoogenm/compose,alexisbellido/docker.github.io,KevinGreene/compose,artemkaint/compose,johnstep/docker.github.io,gdevillele/docker.github.io,danix800/docker.github.io,talolard/compose,d2bit/compose,ggtools/compose,kojiromike/compose,alexandrev/compose,jzwlqx/denverdino.github.io,jeanpralo/compose,funkyfuture/docker-compose,dopry/compose,mnuessler/compose,troy0820/docker.github.io,lmesz/compose,saada/compose,thaJeztah/docker.github.io,alexandrev/compose,sanscontext/docker.github.io,thaJeztah/docker.github.io,mbailey/compose,swoopla/compose,shubheksha/docker.github.io,shin-/compose,aanand/fig,TomasTomecek/compose,screwgoth/compose,joeuo/docker.github.io,dilgerma/compose,VinceBarresi/compose
,LuisBosquez/docker.github.io,joaofnfernandes/docker.github.io,denverdino/denverdino.github.io,Dakno/compose,JimGalasyn/docker.github.io,bbirand/compose,TomasTomecek/compose,anweiss/docker.github.io,aanand/fig,browning/compose,mohitsoni/compose,shubheksha/docker.github.io,joaofnfernandes/docker.github.io,viranch/compose,thaJeztah/docker.github.io,BSWANG/denverdino.github.io,joaofnfernandes/docker.github.io,mohitsoni/compose,mnowster/compose,JimGalasyn/docker.github.io,bdwill/docker.github.io,alexisbellido/docker.github.io,BSWANG/denverdino.github.io,michael-k/docker-compose,jonaseck2/compose,simonista/compose,JimGalasyn/docker.github.io,andrewgee/compose,docker-zh/docker.github.io,joeuo/docker.github.io,unodba/compose,mrfuxi/compose,genki/compose,bdwill/docker.github.io,runcom/compose,docker-zh/docker.github.io,sdurrheimer/compose,JimGalasyn/docker.github.io,menglingwei/denverdino.github.io,aduermael/docker.github.io,thaJeztah/compose,andrewgee/compose,dbdd4us/compose,GM-Alex/compose,alunduil/fig,anweiss/docker.github.io,denverdino/denverdino.github.io,mchasal/compose,johnstep/docker.github.io,dnephin/compose,ggtools/compose,BSWANG/denverdino.github.io,aduermael/docker.github.io,browning/compose,bsmr-docker/compose,swoopla/compose,danix800/docker.github.io,mindaugasrukas/compose,jiekechoo/compose,rillig/docker.github.io,bdwill/docker.github.io,ZJaffee/compose,denverdino/denverdino.github.io,dilgerma/compose,docker/docker.github.io,docker-zh/docker.github.io,MSakamaki/compose,JimGalasyn/docker.github.io,denverdino/docker.github.io,danix800/docker.github.io,j-fuentes/compose,londoncalling/docker.github.io,jzwlqx/denverdino.github.io,KevinGreene/compose,dopry/compose,vlajos/compose,jzwlqx/denverdino.github.io,menglingwei/denverdino.github.io,johnstep/docker.github.io,danix800/docker.github.io,tangkun75/compose,mbailey/compose,jonaseck2/compose,anweiss/docker.github.io,denverdino/compose,Dakno/compose,alunduil/fig,kikkomep/compose,londoncalling/docker.github.io,josephpa
ge/compose,jrabbit/compose,xydinesh/compose,dockerhn/compose,docker/docker.github.io,viranch/compose,VinceBarresi/compose,denverdino/docker.github.io,londoncalling/docker.github.io,aduermael/docker.github.io,j-fuentes/compose,charleswhchan/compose,rgbkrk/compose,shin-/compose,denverdino/compose,sanscontext/docker.github.io,goloveychuk/compose,docker-zh/docker.github.io,anweiss/docker.github.io,mark-adams/compose,bsmr-docker/compose,bdwill/docker.github.io,kojiromike/compose,TheDataShed/compose,qzio/compose,mnowster/compose,screwgoth/compose,rillig/docker.github.io,mark-adams/compose,sanscontext/docker.github.io,au-phiware/compose,tangkun75/compose,shubheksha/docker.github.io,gdevillele/docker.github.io,thaJeztah/docker.github.io,nhumrich/compose,johnstep/docker.github.io,iamluc/compose,shin-/docker.github.io,runcom/compose,ChrisChinchilla/compose,moxiegirl/compose,alexisbellido/docker.github.io,jorgeLuizChaves/compose,genki/compose,tiry/compose,denverdino/denverdino.github.io,phiroict/docker,joeuo/docker.github.io,docker-zh/docker.github.io,rillig/docker.github.io,BSWANG/denverdino.github.io,phiroict/docker,phiroict/docker,ph-One/compose,ChrisChinchilla/compose,johnstep/docker.github.io,gdevillele/docker.github.io,vdemeester/compose,shin-/docker.github.io,shin-/docker.github.io,kikkomep/compose,mdaue/compose,dnephin/compose,aduermael/docker.github.io,shin-/docker.github.io,joeuo/docker.github.io,KalleDK/compose,joeuo/docker.github.io,LuisBosquez/docker.github.io,jzwlqx/denverdino.github.io,menglingwei/denverdino.github.io,docker/docker.github.io,funkyfuture/docker-compose,twitherspoon/compose,sanscontext/docker.github.io,alexisbellido/docker.github.io,gdevillele/docker.github.io,amitsaha/compose,schmunk42/compose,troy0820/docker.github.io,gtrdotmcs/compose,mindaugasrukas/compose,hoogenm/compose,mnuessler/compose,josephpage/compose,LuisBosquez/docker.github.io,sanscontext/docker.github.io,xydinesh/compose,rillig/docker.github.io,albers/compose,phiroict/docker,ZJaffee
/compose,amitsaha/compose,shubheksha/docker.github.io,dockerhn/compose,nhumrich/compose,thaJeztah/docker.github.io,LuisBosquez/docker.github.io,michael-k/docker-compose,schmunk42/compose,sdurrheimer/compose,MSakamaki/compose,BSWANG/denverdino.github.io | from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version='1.18', timeout=timeout)
Allow API version specification via env var
Hard-coding the API version to '1.18' with the docker-py constructor will
cause the docker-py logic at
https://github.com/docker/docker-py/blob/master/docker/client.py#L143-L146
to always fail, which will cause authentication issues if you're using a
remote daemon using API version 1.19 - regardless of the API version of
the registry.
Allow the user to set the API version via an environment variable. If
the variable is not present, it will still default to '1.18' like it
does today.
Signed-off-by: Reilly Herrewig-Pope <feddfa1b1b567537c5d9b5bb606cac73059ea316@mandiant.com> | from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.18')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=timeout)
| <commit_before>from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version='1.18', timeout=timeout)
<commit_msg>Allow API version specification via env var
Hard-coding the API version to '1.18' with the docker-py constructor will
cause the docker-py logic at
https://github.com/docker/docker-py/blob/master/docker/client.py#L143-L146
to always fail, which will cause authentication issues if you're using a
remote daemon using API version 1.19 - regardless of the API version of
the registry.
Allow the user to set the API version via an environment variable. If
the variable is not present, it will still default to '1.18' like it
does today.
Signed-off-by: Reilly Herrewig-Pope <feddfa1b1b567537c5d9b5bb606cac73059ea316@mandiant.com><commit_after> | from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.18')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=timeout)
| from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version='1.18', timeout=timeout)
Allow API version specification via env var
Hard-coding the API version to '1.18' with the docker-py constructor will
cause the docker-py logic at
https://github.com/docker/docker-py/blob/master/docker/client.py#L143-L146
to always fail, which will cause authentication issues if you're using a
remote daemon using API version 1.19 - regardless of the API version of
the registry.
Allow the user to set the API version via an environment variable. If
the variable is not present, it will still default to '1.18' like it
does today.
Signed-off-by: Reilly Herrewig-Pope <feddfa1b1b567537c5d9b5bb606cac73059ea316@mandiant.com>from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.18')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=timeout)
| <commit_before>from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version='1.18', timeout=timeout)
<commit_msg>Allow API version specification via env var
Hard-coding the API version to '1.18' with the docker-py constructor will
cause the docker-py logic at
https://github.com/docker/docker-py/blob/master/docker/client.py#L143-L146
to always fail, which will cause authentication issues if you're using a
remote daemon using API version 1.19 - regardless of the API version of
the registry.
Allow the user to set the API version via an environment variable. If
the variable is not present, it will still default to '1.18' like it
does today.
Signed-off-by: Reilly Herrewig-Pope <feddfa1b1b567537c5d9b5bb606cac73059ea316@mandiant.com><commit_after>from docker import Client
from docker import tls
import ssl
import os
def docker_client():
"""
Returns a docker-py client configured using environment variables
according to the same logic as the official Docker client.
"""
cert_path = os.environ.get('DOCKER_CERT_PATH', '')
if cert_path == '':
cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
base_url = os.environ.get('DOCKER_HOST')
api_version = os.environ.get('COMPOSE_API_VERSION', '1.18')
tls_config = None
if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
parts = base_url.split('://', 1)
base_url = '%s://%s' % ('https', parts[1])
client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem'))
ca_cert = os.path.join(cert_path, 'ca.pem')
tls_config = tls.TLSConfig(
ssl_version=ssl.PROTOCOL_TLSv1,
verify=True,
assert_hostname=False,
client_cert=client_cert,
ca_cert=ca_cert,
)
timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=timeout)
|
21dabaa963815f5dc99f19a7e3f792b0515c9089 | Mockbox.py | Mockbox.py | from threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'w'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
| from threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'a'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
| Fix logging so it doesn't erase the file | Fix logging so it doesn't erase the file
| Python | mit | jkingsman/Mockbox,jkingsman/Mockbox,jkingsman/Mockbox | from threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'w'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
Fix logging so it doesn't erase the file | from threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'a'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
| <commit_before>from threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'w'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
<commit_msg>Fix logging so it doesn't erase the file<commit_after> | from threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'a'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
| from threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'w'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
Fix logging so it doesn't erase the filefrom threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'a'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
| <commit_before>from threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'w'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
<commit_msg>Fix logging so it doesn't erase the file<commit_after>from threading import Thread
from Queue import Queue
from twisted.python import log
import time
import Mailbox, Web
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--verbose", "-v", help="output logs to std out, not the file",
action="store_true")
args = parser.parse_args()
queue = Queue()
MailboxThread = Thread(target=Mailbox.MailboxHandler, args=(queue,))
WebThread = Thread(target=Web.WebHandler, args=(queue,))
MailboxThread.setDaemon(True)
WebThread.setDaemon(True)
if args.verbose:
log.startLogging(sys.stdout)
else:
log.startLogging(open('mockbox.log', 'a'))
MailboxThread.start()
WebThread.start()
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
print "Shutting down..."
raise
|
7343cf6fe3b9dde76a629fd0c607d7ce0b632a64 | coupons/settings.py | coupons/settings.py | import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
| import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
| Correct typo in setting names | Correct typo in setting names
| Python | bsd-3-clause | rsalmaso/django-fluo-coupons,rsalmaso/django-fluo-coupons | import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
Correct typo in setting names | import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
| <commit_before>import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
<commit_msg>Correct typo in setting names<commit_after> | import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
| import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
Correct typo in setting namesimport string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
| <commit_before>import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
<commit_msg>Correct typo in setting names<commit_after>import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
|
093702a38645853d560606446da0b078ba12d14e | eventkit_cloud/auth/admin.py | eventkit_cloud/auth/admin.py |
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
|
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class OAuthInline(admin.StackedInline):
model = OAuth
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [OAuthInline, UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
| Add OAuth class information to the UserAdmin page. | Add OAuth class information to the UserAdmin page.
| Python | bsd-3-clause | terranodo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud |
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
Add OAuth class information to the UserAdmin page. |
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class OAuthInline(admin.StackedInline):
model = OAuth
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [OAuthInline, UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
| <commit_before>
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
<commit_msg>Add OAuth class information to the UserAdmin page.<commit_after> |
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class OAuthInline(admin.StackedInline):
model = OAuth
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [OAuthInline, UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
|
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
Add OAuth class information to the UserAdmin page.
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class OAuthInline(admin.StackedInline):
model = OAuth
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [OAuthInline, UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
| <commit_before>
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
<commit_msg>Add OAuth class information to the UserAdmin page.<commit_after>
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class OAuthInline(admin.StackedInline):
model = OAuth
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [OAuthInline, UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
|
bf4b22ef25d158ddeb1a98432d29451e10a912e6 | quick_orm/examples/hello_world.py | quick_orm/examples/hello_world.py | from quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://')
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name
| from quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://') # database urls: http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name | Add a link for database urls | Add a link for database urls
| Python | bsd-3-clause | tek/sqlpharmacy,tylerlong/quick_orm | from quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://')
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name
Add a link for database urls | from quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://') # database urls: http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name | <commit_before>from quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://')
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name
<commit_msg>Add a link for database urls<commit_after> | from quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://') # database urls: http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name | from quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://')
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name
Add a link for database urlsfrom quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://') # database urls: http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name | <commit_before>from quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://')
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name
<commit_msg>Add a link for database urls<commit_after>from quick_orm.core import Database
from sqlalchemy import Column, String
__metaclass__ = Database.DefaultMeta
class User:
name = Column(String(30))
Database.register()
if __name__ == '__main__':
db = Database('sqlite://') # database urls: http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
db.create_tables()
user = User(name = 'Hello World')
db.session.add_then_commit(user)
user = db.session.query(User).get(1)
print 'My name is', user.name |
c55a2b152cd2b6603ef358e0f764eeb0308ff332 | Orange/__init__.py | Orange/__init__.py | from __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
_import(".distance")
_import(".feature")
_import(".feature.discretization")
_import(".data.discretization")
del _import
del alreadyWarned
del disabledMsg
| from __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
del _import
del alreadyWarned
del disabledMsg
| Remove imports in Orange, except data | Remove imports in Orange, except data
| Python | bsd-2-clause | marinkaz/orange3,kwikadi/orange3,qusp/orange3,kwikadi/orange3,kwikadi/orange3,qPCR4vir/orange3,kwikadi/orange3,qPCR4vir/orange3,qPCR4vir/orange3,cheral/orange3,marinkaz/orange3,qusp/orange3,qPCR4vir/orange3,cheral/orange3,qusp/orange3,marinkaz/orange3,qPCR4vir/orange3,cheral/orange3,marinkaz/orange3,qPCR4vir/orange3,cheral/orange3,marinkaz/orange3,kwikadi/orange3,qusp/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3,cheral/orange3 | from __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
_import(".distance")
_import(".feature")
_import(".feature.discretization")
_import(".data.discretization")
del _import
del alreadyWarned
del disabledMsg
Remove imports in Orange, except data | from __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
del _import
del alreadyWarned
del disabledMsg
| <commit_before>from __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
_import(".distance")
_import(".feature")
_import(".feature.discretization")
_import(".data.discretization")
del _import
del alreadyWarned
del disabledMsg
<commit_msg>Remove imports in Orange, except data<commit_after> | from __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
del _import
del alreadyWarned
del disabledMsg
| from __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
_import(".distance")
_import(".feature")
_import(".feature.discretization")
_import(".data.discretization")
del _import
del alreadyWarned
del disabledMsg
Remove imports in Orange, except datafrom __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
del _import
del alreadyWarned
del disabledMsg
| <commit_before>from __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
_import(".distance")
_import(".feature")
_import(".feature.discretization")
_import(".data.discretization")
del _import
del alreadyWarned
del disabledMsg
<commit_msg>Remove imports in Orange, except data<commit_after>from __future__ import absolute_import
from importlib import import_module
try:
from .import version
# Always use short_version here (see PEP 386)
__version__ = version.short_version
__git_revision__ = version.git_revision
except ImportError:
__version__ = "unknown"
__git_revision__ = "unknown"
ADDONS_ENTRY_POINT = 'orange.addons'
import warnings
import pkg_resources
alreadyWarned = False
disabledMsg = "Some features will be disabled due to failing modules\n"
def _import(name):
global alreadyWarned
try:
import_module(name, package='Orange')
except ImportError as err:
warnings.warn("%sImporting '%s' failed: %s" %
(disabledMsg if not alreadyWarned else "", name, err),
UserWarning, 2)
alreadyWarned = True
def import_all():
import Orange
for name in ["classification", "clustering", "data", "distance",
"evaluation", "feature", "misc", "regression", "statistics"]:
Orange.__dict__[name] = import_module('Orange.' + name, package='Orange')
# Alternatives:
# global classification
# import Orange.classification as classification
# or
# import Orange.classification as classification
# globals()['clasification'] = classification
_import(".data")
del _import
del alreadyWarned
del disabledMsg
|
1bd90d597b23f49bce3ca3402256c9bb1ad22647 | accounts/management/commands/request_common_profile_update.py | accounts/management/commands/request_common_profile_update.py | from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True):
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
| from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True).exclude(email=""):
try:
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
except ValidationError:
self.stdout.write(u'Error with {}'.format(user.email))
| Handle missing and invalid email addresses. | Handle missing and invalid email addresses.
| Python | agpl-3.0 | osamak/student-portal,osamak/student-portal,enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz,osamak/student-portal | from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True):
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
Handle missing and invalid email addresses. | from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True).exclude(email=""):
try:
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
except ValidationError:
self.stdout.write(u'Error with {}'.format(user.email))
| <commit_before>from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True):
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
<commit_msg>Handle missing and invalid email addresses.<commit_after> | from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True).exclude(email=""):
try:
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
except ValidationError:
self.stdout.write(u'Error with {}'.format(user.email))
| from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True):
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
Handle missing and invalid email addresses.from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True).exclude(email=""):
try:
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
except ValidationError:
self.stdout.write(u'Error with {}'.format(user.email))
| <commit_before>from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True):
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
<commit_msg>Handle missing and invalid email addresses.<commit_after>from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from post_office import mail
class Command(BaseCommand):
help = "Notify users to update."
def handle(self, *args, **options):
domain = Site.objects.get_current().domain
full_url = "https://{}{}".format(domain,
reverse('edit_common_profile'))
for user in User.objects.filter(common_profile__is_student=True,
is_active=True).exclude(email=""):
try:
mail.send([user.email],
template="update_common_profile",
context={'user': user, 'full_url': full_url})
self.stdout.write(u'Emailed {}.'.format(user.email))
except ValidationError:
self.stdout.write(u'Error with {}'.format(user.email))
|
c2973d4f2ae7da0f75f573cebd8eb1780d5b33e1 | account_withholding_automatic/models/account_payment_group.py | account_withholding_automatic/models/account_payment_group.py | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
self.env['account.tax'].search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
# limpiamos el type por si se paga desde factura ya que el en ese
# caso viene in_invoice o out_invoice y en search de tax filtrar
# por impuestos de venta y compra (y no los nuestros de pagos
# y cobros)
self.env['account.tax'].with_context(type=None).search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
| FIX withholdings computation when payment come from invoices | FIX withholdings computation when payment come from invoices
| Python | agpl-3.0 | ingadhoc/account-payment | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
self.env['account.tax'].search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
FIX withholdings computation when payment come from invoices | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
# limpiamos el type por si se paga desde factura ya que el en ese
# caso viene in_invoice o out_invoice y en search de tax filtrar
# por impuestos de venta y compra (y no los nuestros de pagos
# y cobros)
self.env['account.tax'].with_context(type=None).search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
self.env['account.tax'].search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
<commit_msg>FIX withholdings computation when payment come from invoices<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
# limpiamos el type por si se paga desde factura ya que el en ese
# caso viene in_invoice o out_invoice y en search de tax filtrar
# por impuestos de venta y compra (y no los nuestros de pagos
# y cobros)
self.env['account.tax'].with_context(type=None).search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
self.env['account.tax'].search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
FIX withholdings computation when payment come from invoices# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
# limpiamos el type por si se paga desde factura ya que el en ese
# caso viene in_invoice o out_invoice y en search de tax filtrar
# por impuestos de venta y compra (y no los nuestros de pagos
# y cobros)
self.env['account.tax'].with_context(type=None).search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
self.env['account.tax'].search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
<commit_msg>FIX withholdings computation when payment come from invoices<commit_after># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class AccountPaymentGroup(models.Model):
_inherit = "account.payment.group"
withholdings_amount = fields.Monetary(
compute='_compute_withholdings_amount'
)
@api.multi
@api.depends(
'payment_ids.tax_withholding_id',
'payment_ids.amount',
)
def _compute_withholdings_amount(self):
for rec in self:
rec.withholdings_amount = sum(
rec.payment_ids.filtered(
lambda x: x.tax_withholding_id).mapped('amount'))
@api.multi
def compute_withholdings(self):
for rec in self:
if rec.partner_type != 'supplier':
continue
# limpiamos el type por si se paga desde factura ya que el en ese
# caso viene in_invoice o out_invoice y en search de tax filtrar
# por impuestos de venta y compra (y no los nuestros de pagos
# y cobros)
self.env['account.tax'].with_context(type=None).search([
('type_tax_use', '=', rec.partner_type),
('company_id', '=', rec.company_id.id),
]).create_payment_withholdings(rec)
@api.multi
def confirm(self):
res = super(AccountPaymentGroup, self).confirm()
for rec in self:
if rec.company_id.automatic_withholdings:
rec.compute_withholdings()
return res
|
987d943a076b591309c06f67834dcb0012dd8491 | audio_train.py | audio_train.py | #%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=32,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')]) | #%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=8,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')]) | Fix OOM by reducing batch | Fix OOM by reducing batch
| Python | apache-2.0 | israelg99/eva | #%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=32,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')])Fix OOM by reducing batch | #%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=8,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')]) | <commit_before>#%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=32,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')])<commit_msg>Fix OOM by reducing batch<commit_after> | #%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=8,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')]) | #%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=32,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')])Fix OOM by reducing batch#%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=8,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')]) | <commit_before>#%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=32,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')])<commit_msg>Fix OOM by reducing batch<commit_after>#%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 32
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=8,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')]) |
fdfe3c77e7b147f813b4cbb13b211f6ed8ff9057 | sublime_plugin_tests/launchers/init.py | sublime_plugin_tests/launchers/init.py | # Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 1000)
| # Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 5000)
| Increase the timeout to an absurd value | Increase the timeout to an absurd value
| Python | unlicense | twolfson/sublime-plugin-tests,twolfson/sublime-plugin-tests | # Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 1000)
Increase the timeout to an absurd value | # Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 5000)
| <commit_before># Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 1000)
<commit_msg>Increase the timeout to an absurd value<commit_after> | # Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 5000)
| # Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 1000)
Increase the timeout to an absurd value# Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 5000)
| <commit_before># Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 1000)
<commit_msg>Increase the timeout to an absurd value<commit_after># Load in core dependencies
import code
import os
import sublime
# Set up constants
__dir__ = os.path.dirname(os.path.abspath(__file__))
def run():
# On every run, re-import the test class
# TODO: Determine if this is necessary
filepath = __dir__ + '/plugin_runner.py'
plugin_dict = {
'__dir__': __dir__,
'__file__': filepath,
'__name__': '%s.plugin_runner' % __package__,
'__package__': __package__,
'__builtins__': __builtins__,
}
# DEV: In Python 2.x, use execfile. In 3.x, use compile + exec.
# if getattr(__builtins__, 'execfile', None):
if sublime.version() < '3000':
execfile(filepath, plugin_dict, plugin_dict)
else:
f = open(filepath)
script = f.read()
interpretter = code.InteractiveInterpreter(plugin_dict)
interpretter.runcode(compile(script, filepath, 'exec'))
test = plugin_dict['Test']()
test.run(__dir__)
# TODO: Set timeout loop that checks if `run` has set a global variable
# TODO: This thought was along side a plugin hook so we can guarantee most plugins are loaded
sublime.set_timeout(run, 5000)
|
ffa6417b30517569cadff00aec839d968f3c91d7 | bisnode/constants.py | bisnode/constants.py | COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
| COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
NEW = 'AN'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(NEW, "new"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
| Add a new rating code | Add a new rating code
| Python | mit | FundedByMe/django-bisnode | COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
Add a new rating code | COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
NEW = 'AN'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(NEW, "new"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
| <commit_before>COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
<commit_msg>Add a new rating code<commit_after> | COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
NEW = 'AN'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(NEW, "new"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
| COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
Add a new rating codeCOMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
NEW = 'AN'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(NEW, "new"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
| <commit_before>COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
<commit_msg>Add a new rating code<commit_after>COMPANY_STANDARD_REPORT = "NRGCompanyReportStandard"
COMPANY_RATING_REPORT = "NRGCompanyReportRating"
HIGH = 'AAA'
GOOD = 'AA'
WORTHY = 'A'
NEW = 'AN'
BELOW_AVERAGE = 'B'
BAD = 'C'
MISSING = '-'
RATING_CHOICES = (
(HIGH, "high"),
(GOOD, "good"),
(WORTHY, "worthy"),
(NEW, "new"),
(BELOW_AVERAGE, "below average"),
(BAD, "bad"),
(MISSING, "missing"),
)
|
73b1273de8f8e17adf51893bdbd24d2067866297 | bootstrap/helpers.py | bootstrap/helpers.py | # -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.orga.models import OrgaFlag
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
# -------------------------------------------------------------------- #
# orgas
@add_to_database
def promote_orga(brand, user):
return OrgaFlag(brand.id, user.id)
| # -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
| Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that | Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that
| Python | bsd-3-clause | m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps | # -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.orga.models import OrgaFlag
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
# -------------------------------------------------------------------- #
# orgas
@add_to_database
def promote_orga(brand, user):
return OrgaFlag(brand.id, user.id)
Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that | # -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
| <commit_before># -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.orga.models import OrgaFlag
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
# -------------------------------------------------------------------- #
# orgas
@add_to_database
def promote_orga(brand, user):
return OrgaFlag(brand.id, user.id)
<commit_msg>Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that<commit_after> | # -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
| # -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.orga.models import OrgaFlag
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
# -------------------------------------------------------------------- #
# orgas
@add_to_database
def promote_orga(brand, user):
return OrgaFlag(brand.id, user.id)
Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that# -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
| <commit_before># -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.orga.models import OrgaFlag
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
# -------------------------------------------------------------------- #
# orgas
@add_to_database
def promote_orga(brand, user):
return OrgaFlag(brand.id, user.id)
<commit_msg>Remove bootstrap helper to promote a user to organizer as there is a service function (and a script calling it) for that<commit_after># -*- coding: utf-8 -*-
"""
bootstrap.helpers
~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.user.models.user import User
from byceps.services.user import service as user_service
from .util import add_to_database
# -------------------------------------------------------------------- #
# users
@add_to_database
def create_user(screen_name, email_address, *, enabled=False):
user = user_service.build_user(screen_name, email_address)
user.enabled = enabled
return user
def find_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one_or_none()
def get_user(screen_name):
return User.query.filter_by(screen_name=screen_name).one()
|
a1e56d65807228b952036fc182071aab5e6ff25f | tests/cli/test_pixel.py | tests/cli/test_pixel.py | """ Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
mpl_skip = pytest.mark.skipif(
mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
reason='Requires either matplotlib "agg" backend or that DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
""" Correctly run for one pixel
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
| """ Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
# Skip marker for tests that draw plots: matplotlib needs either the
# non-interactive "agg" backend (headless-safe) or a real display
# (DISPLAY set, as under X11). Fixes mismatched quoting in the reason text.
mpl_skip = pytest.mark.skipif(
    mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
    reason='Requires either matplotlib "agg" backend or that "DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
    """ Run `yatsm pixel` on a single pixel with one plot type (TS)
    """
    # Build the CLI argument list separately for readability, then invoke.
    args = [
        '-v', 'pixel',
        '--band', '5',
        '--plot', 'TS',
        '--style', 'ggplot',
        example_timeseries['config'], '1', '1',
    ]
    result = CliRunner().invoke(cli, args)
    assert result.exit_code == 0
@mpl_skip
def test_cli_pixel_pass_2(example_timeseries):
""" Correctly run for one pixel for 3 plots
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS', '--plot', 'DOY', '--plot', 'VAL',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
| Add test for all plot types | Add test for all plot types
| Python | mit | valpasq/yatsm,c11/yatsm,ceholden/yatsm,ceholden/yatsm,c11/yatsm,valpasq/yatsm | """ Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
mpl_skip = pytest.mark.skipif(
mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
reason='Requires either matplotlib "agg" backend or that DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
""" Correctly run for one pixel
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
Add test for all plot types | """ Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
mpl_skip = pytest.mark.skipif(
mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
reason='Requires either matplotlib "agg" backend or that DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
""" Correctly run for one pixel
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
@mpl_skip
def test_cli_pixel_pass_2(example_timeseries):
""" Correctly run for one pixel for 3 plots
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS', '--plot', 'DOY', '--plot', 'VAL',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
| <commit_before>""" Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
mpl_skip = pytest.mark.skipif(
mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
reason='Requires either matplotlib "agg" backend or that DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
""" Correctly run for one pixel
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
<commit_msg>Add test for all plot types<commit_after> | """ Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
mpl_skip = pytest.mark.skipif(
mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
reason='Requires either matplotlib "agg" backend or that DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
""" Correctly run for one pixel
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
@mpl_skip
def test_cli_pixel_pass_2(example_timeseries):
""" Correctly run for one pixel for 3 plots
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS', '--plot', 'DOY', '--plot', 'VAL',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
| """ Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
mpl_skip = pytest.mark.skipif(
mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
reason='Requires either matplotlib "agg" backend or that DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
""" Correctly run for one pixel
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
Add test for all plot types""" Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
mpl_skip = pytest.mark.skipif(
mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
reason='Requires either matplotlib "agg" backend or that DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
""" Correctly run for one pixel
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
@mpl_skip
def test_cli_pixel_pass_2(example_timeseries):
""" Correctly run for one pixel for 3 plots
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS', '--plot', 'DOY', '--plot', 'VAL',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
| <commit_before>""" Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
mpl_skip = pytest.mark.skipif(
mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
reason='Requires either matplotlib "agg" backend or that DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
""" Correctly run for one pixel
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
<commit_msg>Add test for all plot types<commit_after>""" Test ``yatsm line``
"""
import os
from click.testing import CliRunner
import matplotlib as mpl
import pytest
from yatsm.cli.main import cli
mpl_skip = pytest.mark.skipif(
mpl.get_backend() != 'agg' and "DISPLAY" not in os.environ,
reason='Requires either matplotlib "agg" backend or that DISPLAY" is set')
@mpl_skip
def test_cli_pixel_pass_1(example_timeseries):
""" Correctly run for one pixel
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
@mpl_skip
def test_cli_pixel_pass_2(example_timeseries):
""" Correctly run for one pixel for 3 plots
"""
runner = CliRunner()
result = runner.invoke(
cli,
['-v', 'pixel',
'--band', '5',
'--plot', 'TS', '--plot', 'DOY', '--plot', 'VAL',
'--style', 'ggplot',
example_timeseries['config'], '1', '1'
])
assert result.exit_code == 0
|
1441a4e96ee610cbb6c87a9e04491102a29c0032 | importlib_metadata/tests/test_main.py | importlib_metadata/tests/test_main.py | import unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.for_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.for_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.for_name('does-not-exist')
| import unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.from_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.from_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.from_name('does-not-exist')
| Fix test failures after method renames | Fix test failures after method renames
| Python | apache-2.0 | python/importlib_metadata | import unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.for_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.for_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.for_name('does-not-exist')
Fix test failures after method renames | import unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.from_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.from_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.from_name('does-not-exist')
| <commit_before>import unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.for_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.for_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.for_name('does-not-exist')
<commit_msg>Fix test failures after method renames<commit_after> | import unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.from_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.from_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.from_name('does-not-exist')
| import unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.for_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.for_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.for_name('does-not-exist')
Fix test failures after method renamesimport unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.from_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.from_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.from_name('does-not-exist')
| <commit_before>import unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.for_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.for_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.for_name('does-not-exist')
<commit_msg>Fix test failures after method renames<commit_after>import unittest
import re
import importlib
import importlib_metadata
class BasicTests(unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
dist = importlib_metadata.Distribution.from_module(importlib_metadata)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_retrieves_version_of_pip(self):
"""
Assume pip is installed and retrieve the version of pip.
"""
pip = importlib.import_module('pip')
dist = importlib_metadata.Distribution.from_module(pip)
assert isinstance(dist.version, str)
assert re.match(self.version_pattern, dist.version)
def test_for_name_does_not_exist(self):
with self.assertRaises(importlib_metadata.PackageNotFound):
importlib_metadata.Distribution.from_name('does-not-exist')
|
4107fe1240598f64335bf3203c717f4ef00f9e0f | settings/travis.py | settings/travis.py | from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
# 'ENGINE': 'django.contrib.gis.db.backends.postgis',
'ENGINE': 'django.db.backends.sqlite3',
#'NAME': 'atlas_test',
#'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
| from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
'ENGINE': 'django.contrib.gis.db.backends.postgis',
# 'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'atlas_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
| Switch to postgis for Travis | Switch to postgis for Travis
At least until I have time to figure out the spatialite automatic build.
| Python | mit | denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase | from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
# 'ENGINE': 'django.contrib.gis.db.backends.postgis',
'ENGINE': 'django.db.backends.sqlite3',
#'NAME': 'atlas_test',
#'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
Switch to postgis for Travis
At least until I have time to figure out the spatialite automatic build. | from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
'ENGINE': 'django.contrib.gis.db.backends.postgis',
# 'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'atlas_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
| <commit_before>from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
# 'ENGINE': 'django.contrib.gis.db.backends.postgis',
'ENGINE': 'django.db.backends.sqlite3',
#'NAME': 'atlas_test',
#'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
<commit_msg>Switch to postgis for Travis
At least until I have time to figure out the spatialite automatic build.<commit_after> | from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
'ENGINE': 'django.contrib.gis.db.backends.postgis',
# 'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'atlas_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
| from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
# 'ENGINE': 'django.contrib.gis.db.backends.postgis',
'ENGINE': 'django.db.backends.sqlite3',
#'NAME': 'atlas_test',
#'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
Switch to postgis for Travis
At least until I have time to figure out the spatialite automatic build.from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
'ENGINE': 'django.contrib.gis.db.backends.postgis',
# 'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'atlas_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
| <commit_before>from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
# 'ENGINE': 'django.contrib.gis.db.backends.postgis',
'ENGINE': 'django.db.backends.sqlite3',
#'NAME': 'atlas_test',
#'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
<commit_msg>Switch to postgis for Travis
At least until I have time to figure out the spatialite automatic build.<commit_after>from defaults import *
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.postgresql_psycopg2',
'ENGINE': 'django.contrib.gis.db.backends.postgis',
# 'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'atlas_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
}
}
|
e0fa24595a60dd3c2ab5d1b64a76bae9ce3c05a8 | testproject/testapp/tests/test_root.py | testproject/testapp/tests/test_root.py | from djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
| from djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
def test_non_existent_urlpattern_results_in_empty_string(self):
request = self.factory.get()
view_object = self.create_view_object(request)
urlpattern_names = ['non-existent-urlpattern']
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, {urlpattern_names[0]: ''})
| Add test for non existent url pattern | Add test for non existent url pattern
| Python | mit | akalipetis/djoser,sunscrapers/djoser,akalipetis/djoser,sunscrapers/djoser,sunscrapers/djoser | from djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
Add test for non existent url pattern | from djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
def test_non_existent_urlpattern_results_in_empty_string(self):
request = self.factory.get()
view_object = self.create_view_object(request)
urlpattern_names = ['non-existent-urlpattern']
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, {urlpattern_names[0]: ''})
| <commit_before>from djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
<commit_msg>Add test for non existent url pattern<commit_after> | from djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
def test_non_existent_urlpattern_results_in_empty_string(self):
request = self.factory.get()
view_object = self.create_view_object(request)
urlpattern_names = ['non-existent-urlpattern']
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, {urlpattern_names[0]: ''})
| from djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
Add test for non existent url patternfrom djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
def test_non_existent_urlpattern_results_in_empty_string(self):
request = self.factory.get()
view_object = self.create_view_object(request)
urlpattern_names = ['non-existent-urlpattern']
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, {urlpattern_names[0]: ''})
| <commit_before>from djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
<commit_msg>Add test for non existent url pattern<commit_after>from djet import assertions, restframework
from rest_framework import status
import djoser.constants
import djoser.utils
import djoser.views
class RootViewTest(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.views.RootView
def test_get_should_return_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, response.data)
def test_all_urlpattern_names_are_in_urls_map(self):
request = self.factory.get()
view_object = self.create_view_object(request)
response = view_object.dispatch(request)
self.assert_status_equal(response, status.HTTP_200_OK)
urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
for urlpattern_name in urlpattern_names:
self.assertIn(urlpattern_name, response.data)
def test_non_existent_urlpattern_results_in_empty_string(self):
request = self.factory.get()
view_object = self.create_view_object(request)
urlpattern_names = ['non-existent-urlpattern']
urls_map = view_object.get_urls_map(request, urlpattern_names, None)
self.assertEquals(urls_map, {urlpattern_names[0]: ''})
|
8f24d774227dca13500c0db0c10771d6b4ee7141 | corehq/apps/domain/management/commands/find_secure_submission_image_domains.py | corehq/apps/domain/management/commands/find_secure_submission_image_domains.py | from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] == 'Image':
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
| from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def check_domain(self, domain, csv_writer):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] in ('Image', 'Audio', 'Video'):
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
return
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
self.check_domain(domain, csv_writer)
| Add audio/video support and bail on findings | Add audio/video support and bail on findings
| Python | bsd-3-clause | qedsoftware/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq | from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] == 'Image':
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
Add audio/video support and bail on findings | from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def check_domain(self, domain, csv_writer):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] in ('Image', 'Audio', 'Video'):
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
return
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
self.check_domain(domain, csv_writer)
| <commit_before>from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] == 'Image':
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
<commit_msg>Add audio/video support and bail on findings<commit_after> | from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def check_domain(self, domain, csv_writer):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] in ('Image', 'Audio', 'Video'):
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
return
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
self.check_domain(domain, csv_writer)
| from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] == 'Image':
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
Add audio/video support and bail on findingsfrom django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def check_domain(self, domain, csv_writer):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] in ('Image', 'Audio', 'Video'):
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
return
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
self.check_domain(domain, csv_writer)
| <commit_before>from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] == 'Image':
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
<commit_msg>Add audio/video support and bail on findings<commit_after>from django.core.management.base import BaseCommand
from corehq.apps.domain.models import Domain
import csv
class Command(BaseCommand):
help = 'Find domains with secure submissions and image questions'
def check_domain(self, domain, csv_writer):
if domain.secure_submissions:
for app in domain.full_applications(include_builds=False):
for module in app.modules:
for form in module.forms:
for question in form.get_questions(app.langs):
if question['type'] in ('Image', 'Audio', 'Video'):
csv_writer.writerow([
domain.name,
app.name,
domain.creating_user
])
return
def handle(self, *args, **options):
with open('domain_results.csv', 'wb+') as csvfile:
csv_writer = csv.writer(
csvfile,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
csv_writer.writerow(['domain', 'app', 'domain_creator'])
for domain in Domain.get_all(include_docs=True):
self.check_domain(domain, csv_writer)
|
194a84b4559449f0b0e3e9cc9e7026392822c0af | questions/urls.py | questions/urls.py | from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'education/module_questions.html' } ),
)
| from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'questions/question_list.html' } ),
)
| Change to questions template location. | Change to questions template location.
| Python | bsd-3-clause | mfitzp/smrtr,mfitzp/smrtr | from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'education/module_questions.html' } ),
)
Change to questions template location. | from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'questions/question_list.html' } ),
)
| <commit_before>from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'education/module_questions.html' } ),
)
<commit_msg>Change to questions template location.<commit_after> | from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'questions/question_list.html' } ),
)
| from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'education/module_questions.html' } ),
)
Change to questions template location.from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'questions/question_list.html' } ),
)
| <commit_before>from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'education/module_questions.html' } ),
)
<commit_msg>Change to questions template location.<commit_after>from django.conf.urls.defaults import *
from spenglr.education.models import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^m/(?P<object_id>\d+)$', 'django.views.generic.list_detail.object_detail', { 'queryset': Module.objects.all(), 'template_name': 'questions/question_list.html' } ),
)
|
b9389a54183e37d8b0d17f74c4655dfb51bf2053 | neo/test/rawiotest/test_openephysbinaryrawio.py | neo/test/rawiotest/test_openephysbinaryrawio.py | import unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
]
if __name__ == "__main__":
unittest.main()
| import unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
'openephysbinary/v0.6.x_neuropixels_multiexp_multistream',
]
if __name__ == "__main__":
unittest.main()
| Add new OE test folder | Add new OE test folder
| Python | bsd-3-clause | NeuralEnsemble/python-neo,INM-6/python-neo,JuliaSprenger/python-neo,apdavison/python-neo | import unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
]
if __name__ == "__main__":
unittest.main()
Add new OE test folder | import unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
'openephysbinary/v0.6.x_neuropixels_multiexp_multistream',
]
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
]
if __name__ == "__main__":
unittest.main()
<commit_msg>Add new OE test folder<commit_after> | import unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
'openephysbinary/v0.6.x_neuropixels_multiexp_multistream',
]
if __name__ == "__main__":
unittest.main()
| import unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
]
if __name__ == "__main__":
unittest.main()
Add new OE test folderimport unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
'openephysbinary/v0.6.x_neuropixels_multiexp_multistream',
]
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
]
if __name__ == "__main__":
unittest.main()
<commit_msg>Add new OE test folder<commit_after>import unittest
from neo.rawio.openephysbinaryrawio import OpenEphysBinaryRawIO
from neo.test.rawiotest.common_rawio_test import BaseTestRawIO
class TestOpenEphysBinaryRawIO(BaseTestRawIO, unittest.TestCase):
rawioclass = OpenEphysBinaryRawIO
entities_to_download = [
'openephysbinary'
]
entities_to_test = [
'openephysbinary/v0.5.3_two_neuropixels_stream',
'openephysbinary/v0.4.4.1_with_video_tracking',
'openephysbinary/v0.5.x_two_nodes',
'openephysbinary/v0.6.x_neuropixels_multiexp_multistream',
]
if __name__ == "__main__":
unittest.main()
|
9f8db061956fc73a197d9c5eb1b045a6e0655dc0 | fc2json.py | fc2json.py | #!/usr/bin/env python
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
| #!/usr/bin/env python
'''
File: fc2json.py
Author: Kristoffer Dalby
Description: Tiny script for converting flashcard format to json.
'''
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
| Use a real JS construct, WTF knows why this works in chromium. | Use a real JS construct, WTF knows why this works in chromium.
| Python | mit | kradalby/flashcards,kradalby/flashcards | #!/usr/bin/env python
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
Use a real JS construct, WTF knows why this works in chromium. | #!/usr/bin/env python
'''
File: fc2json.py
Author: Kristoffer Dalby
Description: Tiny script for converting flashcard format to json.
'''
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
| <commit_before>#!/usr/bin/env python
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
<commit_msg>Use a real JS construct, WTF knows why this works in chromium.<commit_after> | #!/usr/bin/env python
'''
File: fc2json.py
Author: Kristoffer Dalby
Description: Tiny script for converting flashcard format to json.
'''
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
| #!/usr/bin/env python
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
Use a real JS construct, WTF knows why this works in chromium.#!/usr/bin/env python
'''
File: fc2json.py
Author: Kristoffer Dalby
Description: Tiny script for converting flashcard format to json.
'''
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
| <commit_before>#!/usr/bin/env python
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
<commit_msg>Use a real JS construct, WTF knows why this works in chromium.<commit_after>#!/usr/bin/env python
'''
File: fc2json.py
Author: Kristoffer Dalby
Description: Tiny script for converting flashcard format to json.
'''
import sys, json
file = sys.argv[1]
subject = file.split('.')[0]
data = {
"subject": subject,
"cards": {}
}
fc = [line.split(':') for line in open(file, 'r').read().splitlines()]
js = open(subject + ".json", 'w')
for line in fc:
data["cards"][line[0]] = line[1]
js.write(json.dumps(data))
js.close()
|
9d78b571fcd0575e02d4849a0938a51f15e07961 | multi_schema/management/__init__.py | multi_schema/management/__init__.py | from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
import pdb; pdb.set_trace()
for schema in Schema.objects.all():
cursor = connection.cursor()
tables = connection.introspection.table_names()
known_models = set([model for model in connection.introspection.installed_models(tables)])
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
if output and kwargs.get('verbosity', 0) > 1:
print "Creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
models.signals.post_syncdb.connect(post_syncdb_duplicator) | from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
for schema in Schema.objects.all():
cursor = connection.cursor()
schema.activate(cursor)
tables = connection.introspection.table_names()
pending_references = {}
known_models = set([model for model in connection.introspection.installed_models(tables)])
seen_models = set(known_models)
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
seen_models.add(model)
for refto, refs in references.items():
pending_references.setdefault(refto, []).extend(refs)
if refto in seen_models:
output.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references))
output.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references))
if output and kwargs.get('verbosity', 0) > 1:
print " ... creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
tables.append(connection.introspection.table_name_converter(model._meta.db_table))
models.signals.post_syncdb.connect(post_syncdb_duplicator) | Create the correct references to other tables in all schemata. | Create the correct references to other tables in all schemata.
| Python | bsd-3-clause | schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse | from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
import pdb; pdb.set_trace()
for schema in Schema.objects.all():
cursor = connection.cursor()
tables = connection.introspection.table_names()
known_models = set([model for model in connection.introspection.installed_models(tables)])
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
if output and kwargs.get('verbosity', 0) > 1:
print "Creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
models.signals.post_syncdb.connect(post_syncdb_duplicator)Create the correct references to other tables in all schemata. | from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
for schema in Schema.objects.all():
cursor = connection.cursor()
schema.activate(cursor)
tables = connection.introspection.table_names()
pending_references = {}
known_models = set([model for model in connection.introspection.installed_models(tables)])
seen_models = set(known_models)
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
seen_models.add(model)
for refto, refs in references.items():
pending_references.setdefault(refto, []).extend(refs)
if refto in seen_models:
output.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references))
output.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references))
if output and kwargs.get('verbosity', 0) > 1:
print " ... creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
tables.append(connection.introspection.table_name_converter(model._meta.db_table))
models.signals.post_syncdb.connect(post_syncdb_duplicator) | <commit_before>from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
import pdb; pdb.set_trace()
for schema in Schema.objects.all():
cursor = connection.cursor()
tables = connection.introspection.table_names()
known_models = set([model for model in connection.introspection.installed_models(tables)])
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
if output and kwargs.get('verbosity', 0) > 1:
print "Creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
models.signals.post_syncdb.connect(post_syncdb_duplicator)<commit_msg>Create the correct references to other tables in all schemata.<commit_after> | from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
for schema in Schema.objects.all():
cursor = connection.cursor()
schema.activate(cursor)
tables = connection.introspection.table_names()
pending_references = {}
known_models = set([model for model in connection.introspection.installed_models(tables)])
seen_models = set(known_models)
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
seen_models.add(model)
for refto, refs in references.items():
pending_references.setdefault(refto, []).extend(refs)
if refto in seen_models:
output.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references))
output.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references))
if output and kwargs.get('verbosity', 0) > 1:
print " ... creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
tables.append(connection.introspection.table_name_converter(model._meta.db_table))
models.signals.post_syncdb.connect(post_syncdb_duplicator) | from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
import pdb; pdb.set_trace()
for schema in Schema.objects.all():
cursor = connection.cursor()
tables = connection.introspection.table_names()
known_models = set([model for model in connection.introspection.installed_models(tables)])
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
if output and kwargs.get('verbosity', 0) > 1:
print "Creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
models.signals.post_syncdb.connect(post_syncdb_duplicator)Create the correct references to other tables in all schemata.from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
for schema in Schema.objects.all():
cursor = connection.cursor()
schema.activate(cursor)
tables = connection.introspection.table_names()
pending_references = {}
known_models = set([model for model in connection.introspection.installed_models(tables)])
seen_models = set(known_models)
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
seen_models.add(model)
for refto, refs in references.items():
pending_references.setdefault(refto, []).extend(refs)
if refto in seen_models:
output.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references))
output.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references))
if output and kwargs.get('verbosity', 0) > 1:
print " ... creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
tables.append(connection.introspection.table_name_converter(model._meta.db_table))
models.signals.post_syncdb.connect(post_syncdb_duplicator) | <commit_before>from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
import pdb; pdb.set_trace()
for schema in Schema.objects.all():
cursor = connection.cursor()
tables = connection.introspection.table_names()
known_models = set([model for model in connection.introspection.installed_models(tables)])
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
if output and kwargs.get('verbosity', 0) > 1:
print "Creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
models.signals.post_syncdb.connect(post_syncdb_duplicator)<commit_msg>Create the correct references to other tables in all schemata.<commit_after>from django.db import models, connection
from django.core.management.color import no_style
from multi_schema.models import Schema
def post_syncdb_duplicator(sender, **kwargs):
# See if any of the newly created models are schema-aware
schema_aware_models = [m for m in kwargs['created_models'] if m._is_schema_aware and kwargs['app'].__name__ == m.__module__]
if schema_aware_models:
for schema in Schema.objects.all():
cursor = connection.cursor()
schema.activate(cursor)
tables = connection.introspection.table_names()
pending_references = {}
known_models = set([model for model in connection.introspection.installed_models(tables)])
seen_models = set(known_models)
for model in schema_aware_models:
output, references = connection.creation.sql_create_model(model, no_style(), known_models, schema.schema)
seen_models.add(model)
for refto, refs in references.items():
pending_references.setdefault(refto, []).extend(refs)
if refto in seen_models:
output.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references))
output.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references))
if output and kwargs.get('verbosity', 0) > 1:
print " ... creating table %s in schema %s" % (model._meta.db_table, schema.schema)
for statement in output:
cursor.execute(statement)
tables.append(connection.introspection.table_name_converter(model._meta.db_table))
models.signals.post_syncdb.connect(post_syncdb_duplicator) |
533194b5b8e044bca2aaeccff4d550731922b3b8 | genome_designer/conf/demo_settings.py | genome_designer/conf/demo_settings.py | """
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
]
| """
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
'main.demo_view_overrides.login_demo_account',
'django.contrib.auth.views.logout'
]
| Add login and logout to allowed views in DEMO_MODE. | Add login and logout to allowed views in DEMO_MODE.
| Python | mit | woodymit/millstone,churchlab/millstone,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone,woodymit/millstone,woodymit/millstone_accidental_source,churchlab/millstone,woodymit/millstone_accidental_source,churchlab/millstone,woodymit/millstone_accidental_source | """
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
]
Add login and logout to allowed views in DEMO_MODE. | """
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
'main.demo_view_overrides.login_demo_account',
'django.contrib.auth.views.logout'
]
| <commit_before>"""
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
]
<commit_msg>Add login and logout to allowed views in DEMO_MODE.<commit_after> | """
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
'main.demo_view_overrides.login_demo_account',
'django.contrib.auth.views.logout'
]
| """
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
]
Add login and logout to allowed views in DEMO_MODE."""
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
'main.demo_view_overrides.login_demo_account',
'django.contrib.auth.views.logout'
]
| <commit_before>"""
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
]
<commit_msg>Add login and logout to allowed views in DEMO_MODE.<commit_after>"""
Settings for DEMO_MODE.
Must set DEMO_MODE = True in local_settings.py.
"""
# Views that are visible in demo mode.
DEMO_SAFE_VIEWS = [
'main.views.home_view',
'main.views.project_list_view',
'main.views.project_view',
'main.views.tab_root_analyze',
'main.views.reference_genome_list_view',
'main.views.reference_genome_view',
'main.views.sample_list_view',
'main.views.alignment_list_view',
'main.views.alignment_view',
'main.views.sample_alignment_error_view',
'main.views.variant_set_list_view',
'main.views.variant_set_view',
'main.views.single_variant_view',
'main.xhr_handlers.get_variant_list',
'main.xhr_handlers.get_variant_set_list',
'main.xhr_handlers.get_gene_list',
'main.xhr_handlers.get_alignment_groups',
'main.xhr_handlers.is_materialized_view_valid',
'main.xhr_handlers.get_ref_genomes',
'main.xhr_handlers.compile_jbrowse_and_redirect',
'main.template_xhrs.variant_filter_controls',
'main.demo_view_overrides.login_demo_account',
'django.contrib.auth.views.logout'
]
|
6282aa2617bcc9bb8f293ea620eff23d2009334b | example/test.py | example/test.py |
import rust_ext
import numpy as np
a = np.array([1.0, 2.0])
rust_ext.mult(3, a)
print(a)
| #!/usr/bin/env python
import rust_ext
import numpy as np
x = np.array([1.0, 2.0])
y = np.array([2.0, 3.0])
result = rust_ext.axpy(3, x, y)
print(result)
| Use axpy on sample Python script | Use axpy on sample Python script
| Python | bsd-2-clause | termoshtt/rust-numpy,termoshtt/rust-numpy |
import rust_ext
import numpy as np
a = np.array([1.0, 2.0])
rust_ext.mult(3, a)
print(a)
Use axpy on sample Python script | #!/usr/bin/env python
import rust_ext
import numpy as np
x = np.array([1.0, 2.0])
y = np.array([2.0, 3.0])
result = rust_ext.axpy(3, x, y)
print(result)
| <commit_before>
import rust_ext
import numpy as np
a = np.array([1.0, 2.0])
rust_ext.mult(3, a)
print(a)
<commit_msg>Use axpy on sample Python script<commit_after> | #!/usr/bin/env python
import rust_ext
import numpy as np
x = np.array([1.0, 2.0])
y = np.array([2.0, 3.0])
result = rust_ext.axpy(3, x, y)
print(result)
|
import rust_ext
import numpy as np
a = np.array([1.0, 2.0])
rust_ext.mult(3, a)
print(a)
Use axpy on sample Python script#!/usr/bin/env python
import rust_ext
import numpy as np
x = np.array([1.0, 2.0])
y = np.array([2.0, 3.0])
result = rust_ext.axpy(3, x, y)
print(result)
| <commit_before>
import rust_ext
import numpy as np
a = np.array([1.0, 2.0])
rust_ext.mult(3, a)
print(a)
<commit_msg>Use axpy on sample Python script<commit_after>#!/usr/bin/env python
import rust_ext
import numpy as np
x = np.array([1.0, 2.0])
y = np.array([2.0, 3.0])
result = rust_ext.axpy(3, x, y)
print(result)
|
f894aff53577fb459bfac1802f3880133e4143cf | build/build.py | build/build.py | #
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
| #
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class ManualConfigure:
def doBuild(self, dir):
os.system("cd %s; ./configure %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Configure:
def doBuild(self, dir):
os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
| Add classes to run ./configure | Add classes to run ./configure
| Python | apache-2.0 | fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary | #
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
Add classes to run ./configure | #
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class ManualConfigure:
def doBuild(self, dir):
os.system("cd %s; ./configure %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Configure:
def doBuild(self, dir):
os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
| <commit_before>#
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
<commit_msg>Add classes to run ./configure<commit_after> | #
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class ManualConfigure:
def doBuild(self, dir):
os.system("cd %s; ./configure %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Configure:
def doBuild(self, dir):
os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
| #
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
Add classes to run ./configure#
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class ManualConfigure:
def doBuild(self, dir):
os.system("cd %s; ./configure %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Configure:
def doBuild(self, dir):
os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
| <commit_before>#
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
<commit_msg>Add classes to run ./configure<commit_after>#
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import shutil
import util
class ManualConfigure:
def doBuild(self, dir):
os.system("cd %s; ./configure %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Configure:
def doBuild(self, dir):
os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
|
cf3596ee93eabf425c7d42c15fc07b11f7741158 | humblemedia/causes/models.py | humblemedia/causes/models.py | from django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
| from django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
organization = models.ForeignKey('organizations.Organization', related_name='causes', null=True, blank=True)
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
| Add organization to cause model | Add organization to cause model
| Python | mit | vladimiroff/humble-media,vladimiroff/humble-media | from django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
Add organization to cause model | from django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
organization = models.ForeignKey('organizations.Organization', related_name='causes', null=True, blank=True)
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
| <commit_before>from django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
<commit_msg>Add organization to cause model<commit_after> | from django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
organization = models.ForeignKey('organizations.Organization', related_name='causes', null=True, blank=True)
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
| from django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
Add organization to cause modelfrom django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
organization = models.ForeignKey('organizations.Organization', related_name='causes', null=True, blank=True)
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
| <commit_before>from django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
<commit_msg>Add organization to cause model<commit_after>from django.db import models
class Cause(models.Model):
title = models.CharField(max_length=64)
description = models.TextField()
creator = models.ForeignKey('auth.User', related_name='causes')
organization = models.ForeignKey('organizations.Organization', related_name='causes', null=True, blank=True)
target = models.PositiveIntegerField(null=True, blank=True)
url = models.URLField(null=True, blank=True)
is_verified = models.BooleanField(default=False)
is_published = models.BooleanField(default=False)
def __str__(self):
return self.title
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.