Dataset schema (column names with the viewer's value-length statistics):

| column | type | lengths / classes |
|---|---|---|
| commit | string | lengths 40–40 |
| old_file | string | lengths 4–118 |
| new_file | string | lengths 4–118 |
| old_contents | string | lengths 0–2.94k |
| new_contents | string | lengths 1–4.43k |
| subject | string | lengths 15–444 |
| message | string | lengths 16–3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 5–43.2k |
| prompt | string | lengths 17–4.58k |
| response | string | lengths 1–4.43k |
| prompt_tagged | string | lengths 58–4.62k |
| response_tagged | string | lengths 1–4.43k |
| text | string | lengths 132–7.29k |
| text_tagged | string | lengths 173–7.33k |
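The rows below follow this schema. As a minimal sketch of how a commit dataset with these columns could be consumed, the snippet iterates the before/after pairs with the Hugging Face `datasets` library; the identifier `user/commit-dataset` and the `train` split are placeholders, since the preview does not name the dataset.

```python
# Minimal sketch, assuming the preview corresponds to a dataset hosted on
# the Hugging Face Hub. "user/commit-dataset" is a placeholder identifier.
from datasets import load_dataset

ds = load_dataset("user/commit-dataset", split="train")

for row in ds.select(range(3)):
    # Each row pairs a file's contents before and after one commit,
    # alongside the commit subject, full message, and repository list.
    print(row["commit"], row["new_file"])
    print("subject:", row["subject"])
    print("changed:", row["old_contents"] != row["new_contents"])
```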
aacfb96c55c5179e768745f06b1586b3c0f70969
|
pygp/utils/abc.py
|
pygp/utils/abc.py
|
"""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
pass
|
"""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
"""
Slight modification to ABCMeta that copies docstrings from an
abstractmethod to its implementation if the implementation lacks a
docstring.
"""
def __new__(mcs, name, bases, attrs):
abstracts = dict(
(attr, getattr(base, attr))
for base in bases
for attr in getattr(base, '__abstractmethods__', set()))
for attr, value in attrs.items():
implements = (attr in abstracts and
not getattr(value, '__isabstractmethod__', False))
if implements and not getattr(value, '__doc__', False):
docstring = getattr(abstracts[attr], '__doc__', None)
setattr(value, '__doc__', docstring)
return super(ABCMeta, mcs).__new__(mcs, name, bases, attrs)
|
Update the modified ABCMeta to copy docstrings to abstractmethods.
|
Update the modified ABCMeta to copy docstrings to abstractmethods.
|
Python
|
bsd-2-clause
|
mwhoffman/pygp
|
"""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
pass
Update the modified ABCMeta to copy docstrings to abstractmethods.
|
"""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
"""
Slight modification to ABCMeta that copies docstrings from an
abstractmethod to its implementation if the implementation lacks a
docstring.
"""
def __new__(mcs, name, bases, attrs):
abstracts = dict(
(attr, getattr(base, attr))
for base in bases
for attr in getattr(base, '__abstractmethods__', set()))
for attr, value in attrs.items():
implements = (attr in abstracts and
not getattr(value, '__isabstractmethod__', False))
if implements and not getattr(value, '__doc__', False):
docstring = getattr(abstracts[attr], '__doc__', None)
setattr(value, '__doc__', docstring)
return super(ABCMeta, mcs).__new__(mcs, name, bases, attrs)
|
<commit_before>"""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
pass
<commit_msg>Update the modified ABCMeta to copy docstrings to abstractmethods.<commit_after>
|
"""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
"""
Slight modification to ABCMeta that copies docstrings from an
abstractmethod to its implementation if the implementation lacks a
docstring.
"""
def __new__(mcs, name, bases, attrs):
abstracts = dict(
(attr, getattr(base, attr))
for base in bases
for attr in getattr(base, '__abstractmethods__', set()))
for attr, value in attrs.items():
implements = (attr in abstracts and
not getattr(value, '__isabstractmethod__', False))
if implements and not getattr(value, '__doc__', False):
docstring = getattr(abstracts[attr], '__doc__', None)
setattr(value, '__doc__', docstring)
return super(ABCMeta, mcs).__new__(mcs, name, bases, attrs)
|
"""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
pass
Update the modified ABCMeta to copy docstrings to abstractmethods."""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
"""
Slight modification to ABCMeta that copies docstrings from an
abstractmethod to its implementation if the implementation lacks a
docstring.
"""
def __new__(mcs, name, bases, attrs):
abstracts = dict(
(attr, getattr(base, attr))
for base in bases
for attr in getattr(base, '__abstractmethods__', set()))
for attr, value in attrs.items():
implements = (attr in abstracts and
not getattr(value, '__isabstractmethod__', False))
if implements and not getattr(value, '__doc__', False):
docstring = getattr(abstracts[attr], '__doc__', None)
setattr(value, '__doc__', docstring)
return super(ABCMeta, mcs).__new__(mcs, name, bases, attrs)
|
<commit_before>"""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
pass
<commit_msg>Update the modified ABCMeta to copy docstrings to abstractmethods.<commit_after>"""
Modifications to ABC to allow for additional metaclass actions.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
from abc import ABCMeta as ABCMeta_
from abc import abstractmethod
# exported symbols
__all__ = ['ABCMeta', 'abstractmethod']
class ABCMeta(ABCMeta_):
"""
Slight modification to ABCMeta that copies docstrings from an
abstractmethod to its implementation if the implementation lacks a
docstring.
"""
def __new__(mcs, name, bases, attrs):
abstracts = dict(
(attr, getattr(base, attr))
for base in bases
for attr in getattr(base, '__abstractmethods__', set()))
for attr, value in attrs.items():
implements = (attr in abstracts and
not getattr(value, '__isabstractmethod__', False))
if implements and not getattr(value, '__doc__', False):
docstring = getattr(abstracts[attr], '__doc__', None)
setattr(value, '__doc__', docstring)
return super(ABCMeta, mcs).__new__(mcs, name, bases, attrs)
|
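The metaclass in the row above is easiest to see in action. Below is a minimal, self-contained sketch that inlines the docstring-copying `__new__` from the row, using Python 3 metaclass syntax instead of the row's Python 2 `__future__` imports; the `Base` and `Square` classes are illustrative names, not part of the pygp source.

```python
from abc import ABCMeta as ABCMeta_, abstractmethod


class ABCMeta(ABCMeta_):
    """Copy an abstractmethod's docstring onto an undocumented override."""

    def __new__(mcs, name, bases, attrs):
        # Gather the abstract attributes declared by any base class.
        abstracts = {
            attr: getattr(base, attr)
            for base in bases
            for attr in getattr(base, '__abstractmethods__', set())
        }
        for attr, value in attrs.items():
            implements = (attr in abstracts and
                          not getattr(value, '__isabstractmethod__', False))
            if implements and not getattr(value, '__doc__', False):
                # The override has no docstring; inherit the abstract one.
                value.__doc__ = getattr(abstracts[attr], '__doc__', None)
        return super().__new__(mcs, name, bases, attrs)


class Base(metaclass=ABCMeta):
    @abstractmethod
    def area(self):
        """Return the area of the shape."""


class Square(Base):
    def __init__(self, side):
        self.side = side

    def area(self):  # no docstring of its own
        return self.side ** 2


print(Square.area.__doc__)  # -> "Return the area of the shape."
```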
738e4ddd0043c204095767f1f7458db9e6948262
|
tensorflow/tools/docker/jupyter_notebook_config.py
|
tensorflow/tools/docker/jupyter_notebook_config.py
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
password = os.environ['PASSWORD']
if password:
c.NotebookApp.password = passwd(password)
else:
c.NotebookApp.password = ''
c.NotebookApp.token = ''
del os.environ['PASSWORD']
|
Allow disabling password and token auth on jupyter notebooks
|
Allow disabling password and token auth on jupyter notebooks
|
Python
|
apache-2.0
|
Intel-tensorflow/tensorflow,renyi533/tensorflow,hsaputra/tensorflow,zasdfgbnm/tensorflow,dendisuhubdy/tensorflow,ageron/tensorflow,pavelchristof/gomoku-ai,av8ramit/tensorflow,hfp/tensorflow-xsmm,alivecor/tensorflow,Xeralux/tensorflow,seanli9jan/tensorflow,DavidNorman/tensorflow,ghchinoy/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Mistobaan/tensorflow,snnn/tensorflow,jalexvig/tensorflow,nightjean/Deep-Learning,arborh/tensorflow,rabipanda/tensorflow,freedomtan/tensorflow,a-doumoulakis/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,manazhao/tf_recsys,alivecor/tensorflow,apark263/tensorflow,frreiss/tensorflow-fred,DavidNorman/tensorflow,jhseu/tensorflow,andrewcmyers/tensorflow,davidzchen/tensorflow,frreiss/tensorflow-fred,yufengg/tensorflow,jhseu/tensorflow,kevin-coder/tensorflow-fork,lukeiwanski/tensorflow,tensorflow/tensorflow,dongjoon-hyun/tensorflow,Bulochkin/tensorflow_pack,maciekcc/tensorflow,hehongliang/tensorflow,jart/tensorflow,renyi533/tensorflow,asimshankar/tensorflow,jbedorf/tensorflow,rabipanda/tensorflow,Xeralux/tensorflow,xzturn/tensorflow,apark263/tensorflow,freedomtan/tensorflow,gunan/tensorflow,kobejean/tensorflow,gunan/tensorflow,jart/tensorflow,manipopopo/tensorflow,Bulochkin/tensorflow_pack,gunan/tensorflow,ishay2b/tensorflow,kevin-coder/tensorflow-fork,unsiloai/syntaxnet-ops-hack,caisq/tensorflow,theflofly/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,zycdragonball/tensorflow,hfp/tensorflow-xsmm,guschmue/tensorflow,ychfan/tensorflow,jbedorf/tensorflow,ageron/tensorflow,Intel-tensorflow/tensorflow,AnishShah/tensorflow,tiagofrepereira2012/tensorflow,dendisuhubdy/tensorflow,jalexvig/tensorflow,jhseu/tensorflow,jhseu/tensorflow,meteorcloudy/tensorflow,davidzchen/tensorflow,JingJunYin/tensorflow,frreiss/tensorflow-fred,yanchen036/tensorflow,dendisuhubdy/tensorflow,benoitsteiner/tensorflow,aldian/tensorflow,girving/tensorflow,ychfan/tensorflow,benoitsteiner/tensorflow-xsmm,codrut3/tensorflow,gojira/tensorflow,benoitsteiner/tensorflow,maciekcc/tensorflow,a-doumoulakis/tensorflow,karllessard/tensorflow,raymondxyang/tensorflow,alistairlow/tensorflow,jwlawson/tensorflow,Xeralux/tensorflow,dyoung418/tensorflow,gautam1858/tensorflow,jostep/tensorflow,freedomtan/tensorflow,eaplatanios/tensorflow,girving/tensorflow,eadgarchen/tensorflow,Kongsea/tensorflow,zasdfgbnm/tensorflow,ghchinoy/tensorflow,karllessard/tensorflow,theflofly/tensorflow,aselle/tensorflow,sjperkins/tensorflow,Bismarrck/tensorflow,tornadozou/tensorflow,jbedorf/tensorflow,asimshankar/tensorflow,mavenlin/tensorflow,ravindrapanda/tensorflow,renyi533/tensorflow,lukeiwanski/tensorflow,arborh/tensorflow,chemelnucfin/tensorflow,mavenlin/tensorflow,manazhao/tf_recsys,horance-liu/tensorflow,davidzchen/tensorflow,DavidNorman/tensorflow,Moriadry/tensorflow,xzturn/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,brchiu/tensorflow,gojira/tensorflow,eadgarchen/tensorflow,av8ramit/tensorflow,Intel-tensorflow/tensorflow,DavidNorman/tensorflow,Mazecreator/tensorflow,jostep/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ville-k/tensorflow,brchiu/tensorflow,hfp/tensorflow-xsmm,unsiloai/syntaxnet-ops-hack,tensorflow/tensorflow-experimental_link_static_libraries_once,nolanliou/tensorflow,kevin-coder/tensorflow-fork,mavenlin/tensorflow,yufengg/tensorflow,nburn42/tensorflow,ville-k/tensorflow,codrut3/tensorflow,a-doumoulakis/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,allenlavoie/tensorflow,Bismarrck/tensorflow,manipopopo/te
nsorflow,hsaputra/tensorflow,rabipanda/tensorflow,jhseu/tensorflow,ArtsiomCh/tensorflow,tensorflow/tensorflow-pywrap_saved_model,llhe/tensorflow,gojira/tensorflow,benoitsteiner/tensorflow-opencl,raymondxyang/tensorflow,llhe/tensorflow,seanli9jan/tensorflow,alistairlow/tensorflow,Xeralux/tensorflow,nightjean/Deep-Learning,alsrgv/tensorflow,tensorflow/tensorflow-pywrap_saved_model,benoitsteiner/tensorflow-xsmm,paolodedios/tensorflow,ville-k/tensorflow,ishay2b/tensorflow,meteorcloudy/tensorflow,jwlawson/tensorflow,alivecor/tensorflow,allenlavoie/tensorflow,meteorcloudy/tensorflow,maciekcc/tensorflow,lakshayg/tensorflow,snnn/tensorflow,jwlawson/tensorflow,bowang/tensorflow,alivecor/tensorflow,chemelnucfin/tensorflow,alsrgv/tensorflow,alistairlow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ishay2b/tensorflow,asimshankar/tensorflow,kevin-coder/tensorflow-fork,ville-k/tensorflow,ville-k/tensorflow,JVillella/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhaux/tensorflow,ychfan/tensorflow,hehongliang/tensorflow,kevin-coder/tensorflow-fork,Xeralux/tensorflow,kobejean/tensorflow,ishay2b/tensorflow,DavidNorman/tensorflow,Bismarrck/tensorflow,alsrgv/tensorflow,alivecor/tensorflow,ArtsiomCh/tensorflow,mdrumond/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,ZhangXinNan/tensorflow,Mistobaan/tensorflow,dongjoon-hyun/tensorflow,nolanliou/tensorflow,benoitsteiner/tensorflow,gojira/tensorflow,paolodedios/tensorflow,ravindrapanda/tensorflow,dongjoon-hyun/tensorflow,nburn42/tensorflow,snnn/tensorflow,cxxgtxy/tensorflow,laszlocsomor/tensorflow,cxxgtxy/tensorflow,Mazecreator/tensorflow,laszlocsomor/tensorflow,kevin-coder/tensorflow-fork,hsaputra/tensorflow,Intel-Corporation/tensorflow,alshedivat/tensorflow,aldian/tensorflow,Mistobaan/tensorflow,gunan/tensorflow,dyoung418/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,DavidNorman/tensorflow,dongjoon-hyun/tensorflow,JingJunYin/tensorflow,theflofly/tensorflow,ychfan/tensorflow,dendisuhubdy/tensorflow,gunan/tensorflow,ville-k/tensorflow,xzturn/tensorflow,jalexvig/tensorflow,Moriadry/tensorflow,dongjoon-hyun/tensorflow,mixturemodel-flow/tensorflow,jhaux/tensorflow,theflofly/tensorflow,AnishShah/tensorflow,llhe/tensorflow,jendap/tensorflow,aldian/tensorflow,asimshankar/tensorflow,aam-at/tensorflow,bowang/tensorflow,adamtiger/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alsrgv/tensorflow,JVillella/tensorflow,ppwwyyxx/tensorflow,adit-chandra/tensorflow,annarev/tensorflow,nburn42/tensorflow,davidzchen/tensorflow,rabipanda/tensorflow,benoitsteiner/tensorflow,drpngx/tensorflow,nolanliou/tensorflow,nolanliou/tensorflow,meteorcloudy/tensorflow,sjperkins/tensorflow,ran5515/DeepDecision,laszlocsomor/tensorflow,freedomtan/tensorflow,renyi533/tensorflow,freedomtan/tensorflow,ZhangXinNan/tensorflow,zasdfgbnm/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,DavidNorman/tensorflow,a-doumoulakis/tensorflow,brchiu/tensorflow,jalexvig/tensorflow,with-git/tensorflow,Mazecreator/tensorflow,hsaputra/tensorflow,gunan/tensorflow,codrut3/tensorflow,yufengg/tensorflow,karllessard/tensorflow,nightjean/Deep-Learning,girving/tensorflow,horance-liu/tensorflow,asimshankar/tensorflow,snnn/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,AnishShah/tensorflow,sarvex/tensorflow,guschmue/tensorflow,jalexvig/tensorflow,av8ramit/tensorflow,benoitsteiner/tensorflow,xzturn/tensorflow,raymondxyang/t
ensorflow,yongtang/tensorflow,asimshankar/tensorflow,llhe/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,benoitsteiner/tensorflow-xsmm,hsaputra/tensorflow,aselle/tensorflow,eaplatanios/tensorflow,jendap/tensorflow,Xeralux/tensorflow,ghchinoy/tensorflow,aselle/tensorflow,xodus7/tensorflow,mdrumond/tensorflow,Mazecreator/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow,seanli9jan/tensorflow,gunan/tensorflow,Bulochkin/tensorflow_pack,ppwwyyxx/tensorflow,karllessard/tensorflow,gojira/tensorflow,JVillella/tensorflow,jhaux/tensorflow,alivecor/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,adit-chandra/tensorflow,davidzchen/tensorflow,horance-liu/tensorflow,Bismarrck/tensorflow,hfp/tensorflow-xsmm,alistairlow/tensorflow,caisq/tensorflow,AnishShah/tensorflow,Bulochkin/tensorflow_pack,kobejean/tensorflow,eadgarchen/tensorflow,allenlavoie/tensorflow,dancingdan/tensorflow,hsaputra/tensorflow,ghchinoy/tensorflow,with-git/tensorflow,jostep/tensorflow,seanli9jan/tensorflow,manazhao/tf_recsys,yanchen036/tensorflow,hfp/tensorflow-xsmm,yongtang/tensorflow,Moriadry/tensorflow,a-doumoulakis/tensorflow,petewarden/tensorflow,eaplatanios/tensorflow,JingJunYin/tensorflow,zasdfgbnm/tensorflow,with-git/tensorflow,girving/tensorflow,davidzchen/tensorflow,benoitsteiner/tensorflow-xsmm,jalexvig/tensorflow,brchiu/tensorflow,girving/tensorflow,hfp/tensorflow-xsmm,paolodedios/tensorflow,brchiu/tensorflow,renyi533/tensorflow,ppwwyyxx/tensorflow,drpngx/tensorflow,Bismarrck/tensorflow,zycdragonball/tensorflow,annarev/tensorflow,apark263/tensorflow,gunan/tensorflow,petewarden/tensorflow,adamtiger/tensorflow,eadgarchen/tensorflow,nolanliou/tensorflow,cxxgtxy/tensorflow,petewarden/tensorflow,ishay2b/tensorflow,asimshankar/tensorflow,caisq/tensorflow,adit-chandra/tensorflow,dyoung418/tensorflow,alistairlow/tensorflow,tornadozou/tensorflow,dongjoon-hyun/tensorflow,Xeralux/tensorflow,xodus7/tensorflow,tensorflow/tensorflow,maciekcc/tensorflow,paolodedios/tensorflow,hsaputra/tensorflow,drpngx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,kobejean/tensorflow,av8ramit/tensorflow,hsaputra/tensorflow,ZhangXinNan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhaux/tensorflow,jhaux/tensorflow,Bulochkin/tensorflow_pack,jart/tensorflow,drpngx/tensorflow,dancingdan/tensorflow,DavidNorman/tensorflow,renyi533/tensorflow,apark263/tensorflow,raymondxyang/tensorflow,annarev/tensorflow,gojira/tensorflow,xzturn/tensorflow,jart/tensorflow,jendap/tensorflow,ran5515/DeepDecision,benoitsteiner/tensorflow-opencl,tensorflow/tensorflow-pywrap_saved_model,ArtsiomCh/tensorflow,tornadozou/tensorflow,codrut3/tensorflow,mdrumond/tensorflow,horance-liu/tensorflow,dendisuhubdy/tensorflow,adit-chandra/tensorflow,horance-liu/tensorflow,davidzchen/tensorflow,ppwwyyxx/tensorflow,alsrgv/tensorflow,kobejean/tensorflow,andrewcmyers/tensorflow,benoitsteiner/tensorflow-opencl,girving/tensorflow,codrut3/tensorflow,jwlawson/tensorflow,frreiss/tensorflow-fred,jalexvig/tensorflow,dancingdan/tensorflow,alshedivat/tensorflow,sjperkins/tensorflow,jbedorf/tensorflow,AnishShah/tensorflow,llhe/tensorflow,sarvex/tensorflow,yongtang/tensorflow,bowang/tensorflow,nburn42/tensorflow,llhe/tensorflow,bowang/tensorflow,eadgarchen/tensorflow,guschmue/tensorflow,caisq/tensorflow,ychfan/tensorflow,eaplatanios/tensorflow,yanchen036/tensorflow,chemelnucfin/tensorflow,aldian/tensorflow,tiagofrepereira2012/tensorflow,adit-chandra/tensorflow,Intel-tensorflow/t
ensorflow,ville-k/tensorflow,benoitsteiner/tensorflow-xsmm,Bulochkin/tensorflow_pack,alsrgv/tensorflow,brchiu/tensorflow,apark263/tensorflow,nburn42/tensorflow,guschmue/tensorflow,kobejean/tensorflow,renyi533/tensorflow,laszlocsomor/tensorflow,horance-liu/tensorflow,meteorcloudy/tensorflow,nightjean/Deep-Learning,a-doumoulakis/tensorflow,benoitsteiner/tensorflow,zasdfgbnm/tensorflow,tornadozou/tensorflow,rabipanda/tensorflow,snnn/tensorflow,unsiloai/syntaxnet-ops-hack,tiagofrepereira2012/tensorflow,karllessard/tensorflow,dancingdan/tensorflow,benoitsteiner/tensorflow-xsmm,apark263/tensorflow,hfp/tensorflow-xsmm,hfp/tensorflow-xsmm,jwlawson/tensorflow,alsrgv/tensorflow,aam-at/tensorflow,ghchinoy/tensorflow,zycdragonball/tensorflow,av8ramit/tensorflow,adamtiger/tensorflow,Xeralux/tensorflow,tiagofrepereira2012/tensorflow,jart/tensorflow,unsiloai/syntaxnet-ops-hack,karllessard/tensorflow,gautam1858/tensorflow,alistairlow/tensorflow,theflofly/tensorflow,nburn42/tensorflow,asimshankar/tensorflow,gunan/tensorflow,ppwwyyxx/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow,eaplatanios/tensorflow,seanli9jan/tensorflow,dancingdan/tensorflow,dendisuhubdy/tensorflow,aselle/tensorflow,ville-k/tensorflow,theflofly/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jendap/tensorflow,eadgarchen/tensorflow,unsiloai/syntaxnet-ops-hack,Xeralux/tensorflow,girving/tensorflow,jbedorf/tensorflow,benoitsteiner/tensorflow-xsmm,ppwwyyxx/tensorflow,girving/tensorflow,jbedorf/tensorflow,tiagofrepereira2012/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,zasdfgbnm/tensorflow,alsrgv/tensorflow,Intel-Corporation/tensorflow,ZhangXinNan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,drpngx/tensorflow,frreiss/tensorflow-fred,ArtsiomCh/tensorflow,davidzchen/tensorflow,ran5515/DeepDecision,dancingdan/tensorflow,bowang/tensorflow,brchiu/tensorflow,chemelnucfin/tensorflow,dendisuhubdy/tensorflow,frreiss/tensorflow-fred,kobejean/tensorflow,ran5515/DeepDecision,hehongliang/tensorflow,bowang/tensorflow,jart/tensorflow,manipopopo/tensorflow,frreiss/tensorflow-fred,snnn/tensorflow,codrut3/tensorflow,tensorflow/tensorflow,manazhao/tf_recsys,jendap/tensorflow,Intel-tensorflow/tensorflow,Bismarrck/tensorflow,ishay2b/tensorflow,ageron/tensorflow,benoitsteiner/tensorflow-xsmm,pavelchristof/gomoku-ai,xodus7/tensorflow,davidzchen/tensorflow,JingJunYin/tensorflow,allenlavoie/tensorflow,benoitsteiner/tensorflow-opencl,jhseu/tensorflow,raymondxyang/tensorflow,llhe/tensorflow,arborh/tensorflow,frreiss/tensorflow-fred,drpngx/tensorflow,seanli9jan/tensorflow,manipopopo/tensorflow,gautam1858/tensorflow,with-git/tensorflow,karllessard/tensorflow,pavelchristof/gomoku-ai,caisq/tensorflow,Bulochkin/tensorflow_pack,eaplatanios/tensorflow,tillahoffmann/tensorflow,ghchinoy/tensorflow,JingJunYin/tensorflow,Mazecreator/tensorflow,manipopopo/tensorflow,Intel-tensorflow/tensorflow,renyi533/tensorflow,Mistobaan/tensorflow,adit-chandra/tensorflow,Intel-Corporation/tensorflow,annarev/tensorflow,Kongsea/tensorflow,hehongliang/tensorflow,jendap/tensorflow,xzturn/tensorflow,adit-chandra/tensorflow,drpngx/tensorflow,girving/tensorflow,jart/tensorflow,manazhao/tf_recsys,gojira/tensorflow,laszlocsomor/tensorflow,snnn/tensorflow,alshedivat/tensorflow,JingJunYin/tensorflow,kevin-coder/tensorflow-fork,Mistobaan/tensorflow,davidzchen/tensorflow,freedomtan/tensorflow,cxxgtxy/tensorflow,chemelnucfin/tensorflow,alshedivat/tensorflow,yanchen036/tensorflow,aam-at/tensorflow,sarvex/tensorflow,pavelchristof/gomoku-ai,petewarden/tensorflow,guschmue/tensorflow,as
imshankar/tensorflow,andrewcmyers/tensorflow,arborh/tensorflow,alshedivat/tensorflow,xodus7/tensorflow,Kongsea/tensorflow,raymondxyang/tensorflow,jendap/tensorflow,arborh/tensorflow,benoitsteiner/tensorflow,zasdfgbnm/tensorflow,hsaputra/tensorflow,jhaux/tensorflow,adit-chandra/tensorflow,hehongliang/tensorflow,yanchen036/tensorflow,annarev/tensorflow,zasdfgbnm/tensorflow,benoitsteiner/tensorflow-opencl,Mistobaan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Kongsea/tensorflow,ppwwyyxx/tensorflow,alsrgv/tensorflow,jhseu/tensorflow,gunan/tensorflow,horance-liu/tensorflow,Mistobaan/tensorflow,renyi533/tensorflow,rabipanda/tensorflow,sarvex/tensorflow,renyi533/tensorflow,eaplatanios/tensorflow,lukeiwanski/tensorflow,apark263/tensorflow,ravindrapanda/tensorflow,snnn/tensorflow,yongtang/tensorflow,petewarden/tensorflow,manipopopo/tensorflow,ychfan/tensorflow,tillahoffmann/tensorflow,davidzchen/tensorflow,gunan/tensorflow,arborh/tensorflow,manipopopo/tensorflow,rabipanda/tensorflow,ychfan/tensorflow,jwlawson/tensorflow,sjperkins/tensorflow,ychfan/tensorflow,horance-liu/tensorflow,sarvex/tensorflow,lukeiwanski/tensorflow,xzturn/tensorflow,cxxgtxy/tensorflow,ville-k/tensorflow,girving/tensorflow,chemelnucfin/tensorflow,arborh/tensorflow,Bulochkin/tensorflow_pack,JVillella/tensorflow,raymondxyang/tensorflow,lakshayg/tensorflow,rabipanda/tensorflow,Intel-tensorflow/tensorflow,tillahoffmann/tensorflow,jhseu/tensorflow,ageron/tensorflow,nburn42/tensorflow,alshedivat/tensorflow,brchiu/tensorflow,dongjoon-hyun/tensorflow,kobejean/tensorflow,mixturemodel-flow/tensorflow,AnishShah/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,AnishShah/tensorflow,arborh/tensorflow,jalexvig/tensorflow,caisq/tensorflow,jbedorf/tensorflow,lakshayg/tensorflow,alshedivat/tensorflow,AnishShah/tensorflow,aselle/tensorflow,annarev/tensorflow,maciekcc/tensorflow,lakshayg/tensorflow,ageron/tensorflow,sjperkins/tensorflow,apark263/tensorflow,tiagofrepereira2012/tensorflow,rabipanda/tensorflow,tillahoffmann/tensorflow,drpngx/tensorflow,mixturemodel-flow/tensorflow,tensorflow/tensorflow,ville-k/tensorflow,gojira/tensorflow,sjperkins/tensorflow,tillahoffmann/tensorflow,maciekcc/tensorflow,Mistobaan/tensorflow,Moriadry/tensorflow,freedomtan/tensorflow,Intel-Corporation/tensorflow,xodus7/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,nolanliou/tensorflow,meteorcloudy/tensorflow,jalexvig/tensorflow,ravindrapanda/tensorflow,rabipanda/tensorflow,sjperkins/tensorflow,ageron/tensorflow,benoitsteiner/tensorflow-xsmm,allenlavoie/tensorflow,allenlavoie/tensorflow,xodus7/tensorflow,ageron/tensorflow,mixturemodel-flow/tensorflow,nolanliou/tensorflow,DavidNorman/tensorflow,caisq/tensorflow,lakshayg/tensorflow,tillahoffmann/tensorflow,nightjean/Deep-Learning,nolanliou/tensorflow,tensorflow/tensorflow,jhaux/tensorflow,jhaux/tensorflow,ageron/tensorflow,zycdragonball/tensorflow,dancingdan/tensorflow,Mazecreator/tensorflow,cxxgtxy/tensorflow,mdrumond/tensorflow,dyoung418/tensorflow,av8ramit/tensorflow,kevin-coder/tensorflow-fork,Mistobaan/tensorflow,dyoung418/tensorflow,tornadozou/tensorflow,tillahoffmann/tensorflow,petewarden/tensorflow,adamtiger/tensorflow,alshedivat/tensorflow,zycdragonball/tensorflow,ZhangXinNan/tensorflow,gojira/tensorflow,guschmue/tensorflow,nightjean/Deep-Learning,aam-at/tensorflow,suiyuan2009/tensorflow,benoitsteiner/tensorflow,ghchinoy/tensorflow,sarvex/tensorflow,llhe/tensorflow,jart/tensorflow,frreiss/tensorflow-fred,freedomtan/tensorflow,brchiu/tensorflow,kobejean/tensorflow,dendisu
hubdy/tensorflow,yongtang/tensorflow,lukeiwanski/tensorflow,guschmue/tensorflow,ArtsiomCh/tensorflow,ravindrapanda/tensorflow,jhseu/tensorflow,arborh/tensorflow,llhe/tensorflow,alistairlow/tensorflow,yanchen036/tensorflow,ppwwyyxx/tensorflow,jendap/tensorflow,dendisuhubdy/tensorflow,xodus7/tensorflow,dyoung418/tensorflow,xzturn/tensorflow,ravindrapanda/tensorflow,kevin-coder/tensorflow-fork,eadgarchen/tensorflow,ran5515/DeepDecision,arborh/tensorflow,yongtang/tensorflow,brchiu/tensorflow,manipopopo/tensorflow,ravindrapanda/tensorflow,alsrgv/tensorflow,mavenlin/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,aam-at/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,chemelnucfin/tensorflow,meteorcloudy/tensorflow,maciekcc/tensorflow,ran5515/DeepDecision,jwlawson/tensorflow,xzturn/tensorflow,sjperkins/tensorflow,guschmue/tensorflow,aldian/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,with-git/tensorflow,yongtang/tensorflow,suiyuan2009/tensorflow,Intel-tensorflow/tensorflow,bowang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,pavelchristof/gomoku-ai,Intel-Corporation/tensorflow,apark263/tensorflow,lukeiwanski/tensorflow,gautam1858/tensorflow,horance-liu/tensorflow,ghchinoy/tensorflow,mavenlin/tensorflow,jhseu/tensorflow,bowang/tensorflow,ghchinoy/tensorflow,aselle/tensorflow,ravindrapanda/tensorflow,adit-chandra/tensorflow,nburn42/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,dendisuhubdy/tensorflow,petewarden/tensorflow,tensorflow/tensorflow,xodus7/tensorflow,raymondxyang/tensorflow,tornadozou/tensorflow,brchiu/tensorflow,llhe/tensorflow,kevin-coder/tensorflow-fork,jwlawson/tensorflow,jbedorf/tensorflow,jhaux/tensorflow,xodus7/tensorflow,cxxgtxy/tensorflow,Kongsea/tensorflow,theflofly/tensorflow,eaplatanios/tensorflow,DavidNorman/tensorflow,mixturemodel-flow/tensorflow,zasdfgbnm/tensorflow,annarev/tensorflow,drpngx/tensorflow,DavidNorman/tensorflow,aselle/tensorflow,aldian/tensorflow,tiagofrepereira2012/tensorflow,aam-at/tensorflow,aselle/tensorflow,sarvex/tensorflow,tillahoffmann/tensorflow,seanli9jan/tensorflow,lakshayg/tensorflow,manazhao/tf_recsys,yufengg/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,eaplatanios/tensorflow,theflofly/tensorflow,alshedivat/tensorflow,eaplatanios/tensorflow,xodus7/tensorflow,jendap/tensorflow,dancingdan/tensorflow,dancingdan/tensorflow,chemelnucfin/tensorflow,nolanliou/tensorflow,alistairlow/tensorflow,kobejean/tensorflow,annarev/tensorflow,unsiloai/syntaxnet-ops-hack,unsiloai/syntaxnet-ops-hack,arborh/tensorflow,lakshayg/tensorflow,meteorcloudy/tensorflow,allenlavoie/tensorflow,petewarden/tensorflow,zycdragonball/tensorflow,ArtsiomCh/tensorflow,jbedorf/tensorflow,yufengg/tensorflow,with-git/tensorflow,DavidNorman/tensorflow,tornadozou/tensorflow,jendap/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,benoitsteiner/tensorflow-opencl,laszlocsomor/tensorflow,manipopopo/tensorflow,caisq/tensorflow,Bismarrck/tensorflow,suiyuan2009/tensorflow,nburn42/tensorflow,JVillella/tensorflow,ArtsiomCh/tensorflow,mdrumond/tensorflow,zasdfgbnm/tensorflow,jbedorf/tensorflow,andrewcmyers/tensorflow,aldian/tensorflow,hehongliang/tensorflow,paolodedios/tensorflow,Moriadry/tensorflow,JingJunYin/tensorflow,Mazecreator/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,aam-at/tensorflow,jart/tensorflow,yanchen036/tensorflow,suiyuan2009/tensorflow,meteorcloudy/tensorflow,aam-at/tensorflow,yufengg/tensorflow,mixturemodel-flow/tensorflow,apark263/tensorflow,alistairlow/te
nsorflow,alivecor/tensorflow,Xeralux/tensorflow,mixturemodel-flow/tensorflow,meteorcloudy/tensorflow,paolodedios/tensorflow,dancingdan/tensorflow,Mazecreator/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,jalexvig/tensorflow,allenlavoie/tensorflow,ArtsiomCh/tensorflow,seanli9jan/tensorflow,asimshankar/tensorflow,gautam1858/tensorflow,jostep/tensorflow,gautam1858/tensorflow,JingJunYin/tensorflow,ppwwyyxx/tensorflow,ZhangXinNan/tensorflow,petewarden/tensorflow,with-git/tensorflow,alsrgv/tensorflow,AnishShah/tensorflow,andrewcmyers/tensorflow,lukeiwanski/tensorflow,AnishShah/tensorflow,ZhangXinNan/tensorflow,adamtiger/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,adit-chandra/tensorflow,adit-chandra/tensorflow,mavenlin/tensorflow,lakshayg/tensorflow,ageron/tensorflow,a-doumoulakis/tensorflow,dongjoon-hyun/tensorflow,snnn/tensorflow,Bulochkin/tensorflow_pack,ZhangXinNan/tensorflow,jhseu/tensorflow,AnishShah/tensorflow,horance-liu/tensorflow,tensorflow/tensorflow,yanchen036/tensorflow,sjperkins/tensorflow,dyoung418/tensorflow,ppwwyyxx/tensorflow,xzturn/tensorflow,dongjoon-hyun/tensorflow,jhaux/tensorflow,aldian/tensorflow,Bismarrck/tensorflow,pavelchristof/gomoku-ai,girving/tensorflow,nburn42/tensorflow,Bismarrck/tensorflow,xodus7/tensorflow,mdrumond/tensorflow,paolodedios/tensorflow,xzturn/tensorflow,ran5515/DeepDecision,alistairlow/tensorflow,drpngx/tensorflow,cxxgtxy/tensorflow,snnn/tensorflow,apark263/tensorflow,kobejean/tensorflow,jwlawson/tensorflow,maciekcc/tensorflow,aselle/tensorflow,andrewcmyers/tensorflow,ghchinoy/tensorflow,Mistobaan/tensorflow,benoitsteiner/tensorflow-opencl,chemelnucfin/tensorflow,lukeiwanski/tensorflow,allenlavoie/tensorflow,annarev/tensorflow,Kongsea/tensorflow,adamtiger/tensorflow,ageron/tensorflow,jhseu/tensorflow,andrewcmyers/tensorflow,arborh/tensorflow,Moriadry/tensorflow,dongjoon-hyun/tensorflow,jwlawson/tensorflow,mdrumond/tensorflow,mavenlin/tensorflow,petewarden/tensorflow,aam-at/tensorflow,with-git/tensorflow,eadgarchen/tensorflow,Xeralux/tensorflow,Intel-Corporation/tensorflow,petewarden/tensorflow,jart/tensorflow,codrut3/tensorflow,seanli9jan/tensorflow,hfp/tensorflow-xsmm,pavelchristof/gomoku-ai,ghchinoy/tensorflow,jalexvig/tensorflow,av8ramit/tensorflow,eadgarchen/tensorflow,zasdfgbnm/tensorflow,andrewcmyers/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gojira/tensorflow,benoitsteiner/tensorflow-xsmm,snnn/tensorflow,yongtang/tensorflow,nolanliou/tensorflow,lukeiwanski/tensorflow,chemelnucfin/tensorflow,allenlavoie/tensorflow,Bulochkin/tensorflow_pack,codrut3/tensorflow,Intel-tensorflow/tensorflow,dancingdan/tensorflow,nightjean/Deep-Learning,Kongsea/tensorflow,sjperkins/tensorflow,mavenlin/tensorflow,sarvex/tensorflow,aselle/tensorflow,guschmue/tensorflow,suiyuan2009/tensorflow,pavelchristof/gomoku-ai,Bulochkin/tensorflow_pack,ageron/tensorflow,eaplatanios/tensorflow,lukeiwanski/tensorflow,freedomtan/tensorflow,jwlawson/tensorflow,theflofly/tensorflow,gojira/tensorflow,ravindrapanda/tensorflow,laszlocsomor/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,adamtiger/tensorflow,unsiloai/syntaxnet-ops-hack,manipopopo/tensorflow,caisq/tensorflow,benoitsteiner/tensorflow,benoitsteiner/tensorflow-opencl,laszlocsomor/tensorflow,rabipanda/tensorflow,aselle/tensorflow,Moriadry/tensorflow,laszlocsomor/tensorflow,annarev/tensorflow,codrut3/tensorflow,theflofly/tensorflow,ZhangXinNan/tensor
flow,JingJunYin/tensorflow,allenlavoie/tensorflow,asimshankar/tensorflow,theflofly/tensorflow,Mazecreator/tensorflow,hehongliang/tensorflow,kevin-coder/tensorflow-fork,guschmue/tensorflow,alshedivat/tensorflow,Mistobaan/tensorflow,nburn42/tensorflow,aam-at/tensorflow,jostep/tensorflow,ZhangXinNan/tensorflow,Moriadry/tensorflow,suiyuan2009/tensorflow,ageron/tensorflow,benoitsteiner/tensorflow-opencl,chemelnucfin/tensorflow,dongjoon-hyun/tensorflow,nightjean/Deep-Learning,manazhao/tf_recsys,tornadozou/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alshedivat/tensorflow,jostep/tensorflow,jostep/tensorflow,JingJunYin/tensorflow,JVillella/tensorflow,laszlocsomor/tensorflow,chemelnucfin/tensorflow,av8ramit/tensorflow,Bulochkin/tensorflow_pack,av8ramit/tensorflow,seanli9jan/tensorflow,paolodedios/tensorflow,a-doumoulakis/tensorflow,jbedorf/tensorflow,gunan/tensorflow,tiagofrepereira2012/tensorflow,mixturemodel-flow/tensorflow,ychfan/tensorflow,dyoung418/tensorflow,caisq/tensorflow,ishay2b/tensorflow,alivecor/tensorflow,renyi533/tensorflow,yufengg/tensorflow,karllessard/tensorflow,benoitsteiner/tensorflow-xsmm,codrut3/tensorflow,mdrumond/tensorflow,jendap/tensorflow,freedomtan/tensorflow,mdrumond/tensorflow,benoitsteiner/tensorflow,hsaputra/tensorflow,Kongsea/tensorflow,ZhangXinNan/tensorflow,jostep/tensorflow,hfp/tensorflow-xsmm,alsrgv/tensorflow,av8ramit/tensorflow,zycdragonball/tensorflow,xzturn/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,JVillella/tensorflow,suiyuan2009/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,av8ramit/tensorflow,aam-at/tensorflow,gautam1858/tensorflow,eadgarchen/tensorflow,hfp/tensorflow-xsmm,jbedorf/tensorflow,adit-chandra/tensorflow
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
Allow disabling password and token auth on jupyter notebooks
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
password = os.environ['PASSWORD']
if password:
c.NotebookApp.password = passwd(password)
else:
c.NotebookApp.password = ''
c.NotebookApp.token = ''
del os.environ['PASSWORD']
|
<commit_before># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
<commit_msg>Allow disabling password and token auth on jupyter notebooks<commit_after>
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
password = os.environ['PASSWORD']
if password:
c.NotebookApp.password = passwd(password)
else:
c.NotebookApp.password = ''
c.NotebookApp.token = ''
del os.environ['PASSWORD']
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
Allow disabling password and token auth on jupyter notebooks# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
password = os.environ['PASSWORD']
if password:
c.NotebookApp.password = passwd(password)
else:
c.NotebookApp.password = ''
c.NotebookApp.token = ''
del os.environ['PASSWORD']
|
<commit_before># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
<commit_msg>Allow disabling password and token auth on jupyter notebooks<commit_after># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = int(os.getenv('PORT', 8888))
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
password = os.environ['PASSWORD']
if password:
c.NotebookApp.password = passwd(password)
else:
c.NotebookApp.password = ''
c.NotebookApp.token = ''
del os.environ['PASSWORD']
|
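The change in this row is behavioral: a non-empty `PASSWORD` still sets a hashed notebook password, while an explicitly empty `PASSWORD` now disables both password and token auth. Here is a minimal sketch of that branch in isolation, with a stub in place of IPython's `passwd` hasher so it runs anywhere; the stub and the `AuthConfig` holder are illustrative, not part of the Jupyter API.

```python
def passwd(plaintext):
    # Stand-in for IPython.lib.passwd, which returns a salted hash.
    return "hashed:" + plaintext


class AuthConfig:
    """Illustrative holder for the two settings the row assigns."""
    password = None
    token = None


def configure_auth(cfg, environ):
    # Mirror the row's branch: hash a non-empty PASSWORD; treat an empty
    # PASSWORD as an explicit request to disable password and token auth.
    if 'PASSWORD' in environ:
        password = environ['PASSWORD']
        if password:
            cfg.password = passwd(password)
        else:
            cfg.password = ''
            cfg.token = ''
        del environ['PASSWORD']


cfg = AuthConfig()
configure_auth(cfg, {'PASSWORD': ''})  # empty value -> auth fully disabled
print(cfg.password == '' and cfg.token == '')  # -> True
```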
99a31b3396d88629a5f866ebad0122f111d75778
|
Lib/test/leakers/test_gestalt.py
|
Lib/test/leakers/test_gestalt.py
|
import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X'
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
|
import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X"
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
|
Fix mismatch opening and closing quotes on a string.
|
Fix mismatch opening and closing quotes on a string.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X'
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
Fix mismatch opening and closing quotes on a string.
|
import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X"
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
|
<commit_before>
import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X'
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
<commit_msg>Fix mismatch opening and closing quotes on a string.<commit_after>
|
import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X"
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
|
import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X'
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
Fix mismatch opening and closing quotes on a string.import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X"
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
|
<commit_before>
import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X'
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
<commit_msg>Fix mismatch opening and closing quotes on a string.<commit_after>import sys
if sys.platform != 'darwin':
raise ValueError, "This test only leaks on Mac OS X"
def leak():
# taken from platform._mac_ver_lookup()
from gestalt import gestalt
import MacOS
try:
gestalt('sysu')
except MacOS.Error:
pass
|
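A side note on this row: both the buggy and fixed versions use the Python 2 comma form of `raise`, which is a syntax error in Python 3. The modern spelling, shown here only for reference, calls the exception class instead.

```python
import sys

# Python 3 equivalent of the row's Python 2 statement
# `raise ValueError, "This test only leaks on Mac OS X"`.
if sys.platform != 'darwin':
    raise ValueError("This test only leaks on Mac OS X")
```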
ee85d2fffc0e42022be66bf667005eb44391cb9e
|
django/similarities/utils.py
|
django/similarities/utils.py
|
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
return Artist.objects.filter(similarity__other_artist=artist,
similarity__weight__gt=0)
|
from django.db.models import Q
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
similar = Q(similarity__other_artist=artist, similarity__weight__gt=0)
return Artist.objects.filter(similar).order_by('-similarity__weight')
|
Order similar artist results properly
|
Order similar artist results properly
|
Python
|
bsd-3-clause
|
FreeMusicNinja/freemusic.ninja,FreeMusicNinja/freemusic.ninja
|
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
return Artist.objects.filter(similarity__other_artist=artist,
similarity__weight__gt=0)
Order similar artist results properly
|
from django.db.models import Q
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
similar = Q(similarity__other_artist=artist, similarity__weight__gt=0)
return Artist.objects.filter(similar).order_by('-similarity__weight')
|
<commit_before>import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
return Artist.objects.filter(similarity__other_artist=artist,
similarity__weight__gt=0)
<commit_msg>Order similar artist results properly<commit_after>
|
from django.db.models import Q
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
similar = Q(similarity__other_artist=artist, similarity__weight__gt=0)
return Artist.objects.filter(similar).order_by('-similarity__weight')
|
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
return Artist.objects.filter(similarity__other_artist=artist,
similarity__weight__gt=0)
Order similar artist results properly
from django.db.models import Q
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
similar = Q(similarity__other_artist=artist, similarity__weight__gt=0)
return Artist.objects.filter(similar).order_by('-similarity__weight')
|
<commit_before>import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
return Artist.objects.filter(similarity__other_artist=artist,
similarity__weight__gt=0)
<commit_msg>Order similar artist results properly<commit_after>from django.db.models import Q
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
similar = Q(similarity__other_artist=artist, similarity__weight__gt=0)
return Artist.objects.filter(similar).order_by('-similarity__weight')
|
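A note on the record above: a Django queryset without an explicit order_by returns rows in database-dependent order, so the fix both moves the filter condition into a Q object and adds a descending sort on similarity weight. The same idea in dependency-free Python (data and names here are illustrative, not from the project):

similarities = [
    {"artist": "A", "weight": 0.9},
    {"artist": "B", "weight": 0.0},
    {"artist": "C", "weight": 0.4},
]
# Keep only positive weights (the Q(similarity__weight__gt=0) condition) ...
matches = [s for s in similarities if s["weight"] > 0]
# ... then sort by weight, highest first (the order_by('-similarity__weight')).
ordered = sorted(matches, key=lambda s: s["weight"], reverse=True)
assert [s["artist"] for s in ordered] == ["A", "C"]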
d72f5bb03e2fcab85fae164df8d0eb76bb2d1ad7
|
api/views.py
|
api/views.py
|
from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
}
|
from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
'coffee': 'ERROR 418',
}
|
Add coffee status to API
|
Add coffee status to API
|
Python
|
mit
|
Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano
|
from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
}
Add coffee status to API
|
from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
'coffee': 'ERROR 418',
}
|
<commit_before>from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
}
<commit_msg>Add coffee status to API<commit_after>
|
from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
'coffee': 'ERROR 418',
}
|
from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
}
Add coffee status to API
from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
'coffee': 'ERROR 418',
}
|
<commit_before>from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
}
<commit_msg>Add coffee status to API<commit_after>from django.shortcuts import render
from django.views.generic import View
from .utils import apimethod
# Create your views here.
class StatusView(View):
@apimethod
def get(self, request):
return {
'web': 'UP', #Tautological: If we're responding, we're up
'xmpp': 'UNK', #TODO: Check if it's up
'coffee': 'ERROR 418',
}
|
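The 'coffee': 'ERROR 418' entry alludes to HTTP status 418 ("I'm a teapot", RFC 2324). The apimethod decorator itself never appears in the record; assuming it simply serializes the returned dict, a plausible sketch looks like this (the decorator body is an assumption, not the project's actual code):

import json
from functools import wraps

def apimethod(fn):
    # Hypothetical stand-in: JSON-encode whatever dict the view returns.
    # The project's real decorator is not shown in the record.
    @wraps(fn)
    def wrapper(*args, **kwargs):
        return json.dumps(fn(*args, **kwargs))
    return wrapper

@apimethod
def get_status():
    return {'web': 'UP', 'xmpp': 'UNK', 'coffee': 'ERROR 418'}

print(get_status())  # {"web": "UP", "xmpp": "UNK", "coffee": "ERROR 418"}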
9f967406e634fcb340fb8af0b8f5981661936038
|
profile_bs_xf03id/startup/01-bluesky.py
|
profile_bs_xf03id/startup/01-bluesky.py
|
from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import install_qt_kicker
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
|
from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import (install_qt_kicker, register_transform)
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
# Allow scans to be run by using the prefix '<' instead of typing RE(...)
register_transform('RE', prefix='<')
|
Add magic for RE(...) calls
|
Add magic for RE(...) calls
|
Python
|
bsd-2-clause
|
NSLS-II-HXN/ipython_ophyd,NSLS-II-HXN/ipython_ophyd
|
from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import install_qt_kicker
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
Add magic for RE(...) calls
|
from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import (install_qt_kicker, register_transform)
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
# Allow scans to be run by using the prefix '<' instead of typing RE(...)
register_transform('RE', prefix='<')
|
<commit_before>from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import install_qt_kicker
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
<commit_msg>Add magic for RE(...) calls<commit_after>
|
from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import (install_qt_kicker, register_transform)
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
# Allow scans to be run by using the prefix '<' instead of typing RE(...)
register_transform('RE', prefix='<')
|
from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import install_qt_kicker
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
Add magic for RE(...) calls
from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import (install_qt_kicker, register_transform)
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
# Allow scans to be run by using the prefix '<' instead of typing RE(...)
register_transform('RE', prefix='<')
|
<commit_before>from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import install_qt_kicker
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
<commit_msg>Add magic for RE(...) calls<commit_after>from ophyd.commands import setup_ophyd
setup_ophyd()
from ophyd.commands import *
from bluesky.callbacks import *
from bluesky.plans import *
# from bluesky.spec_api import *
from bluesky.utils import (install_qt_kicker, register_transform)
from bluesky.global_state import (get_gs, abort, stop, resume)
from databroker import (DataBroker as db, get_events, get_images, get_table,
get_fields, restream, process)
# The following line allows bluesky and pyqt4 GUIs to play nicely together:
install_qt_kicker()
gs = get_gs()
RE = gs.RE
RE.md['group'] = ''
RE.md['config'] = {}
RE.md['beamline_id'] = 'HXN'
RE.verbose = True
# RE.ignore_callback_exceptions = False
# Allow scans to be run by using the prefix '<' instead of typing RE(...)
register_transform('RE', prefix='<')
|
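register_transform('RE', prefix='<') installs an IPython input transformer so that a line like <scan(det, motor) is rewritten to RE(scan(det, motor)) before execution. A rough, standalone imitation of that string rewrite (the real helper hooks into IPython; this only mimics the transformation):

def prefix_transform(line, target='RE', prefix='<'):
    # Rewrite "<scan(det, motor)" into "RE(scan(det, motor))"; leave
    # everything else untouched.
    stripped = line.lstrip()
    if stripped.startswith(prefix):
        return '%s(%s)' % (target, stripped[len(prefix):].strip())
    return line

assert prefix_transform('<scan(det, motor)') == 'RE(scan(det, motor))'
assert prefix_transform('x = 1') == 'x = 1'  # non-prefixed lines pass through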
64feb2ed638e43f15b7008507907f7d607ebccf3
|
nbgrader/apps/assignapp.py
|
nbgrader/apps/assignapp.py
|
from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
|
from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
aliases = {}
aliases.update(base_aliases)
aliases.update({
'header': 'IncludeHeaderFooter.header',
'footer': 'IncludeHeaderFooter.footer'
})
flags = {}
flags.update(base_flags)
flags.update({
})
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
aliases = aliases
flags = flags
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
|
Add some flags to nbgrader assign
|
Add some flags to nbgrader assign
|
Python
|
bsd-3-clause
|
ellisonbg/nbgrader,modulexcite/nbgrader,jupyter/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,jdfreder/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,dementrock/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,dementrock/nbgrader,alope107/nbgrader,MatKallada/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,modulexcite/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,EdwardJKim/nbgrader,jdfreder/nbgrader,alope107/nbgrader,jupyter/nbgrader
|
from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
Add some flags to nbgrader assign
|
from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
aliases = {}
aliases.update(base_aliases)
aliases.update({
'header': 'IncludeHeaderFooter.header',
'footer': 'IncludeHeaderFooter.footer'
})
flags = {}
flags.update(base_flags)
flags.update({
})
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
aliases = aliases
flags = flags
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
|
<commit_before>from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
<commit_msg>Add some flags to nbgrader assign<commit_after>
|
from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
aliases = {}
aliases.update(base_aliases)
aliases.update({
'header': 'IncludeHeaderFooter.header',
'footer': 'IncludeHeaderFooter.footer'
})
flags = {}
flags.update(base_flags)
flags.update({
})
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
aliases = aliases
flags = flags
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
|
from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
Add some flags to nbgrader assign
from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
aliases = {}
aliases.update(base_aliases)
aliases.update({
'header': 'IncludeHeaderFooter.header',
'footer': 'IncludeHeaderFooter.footer'
})
flags = {}
flags.update(base_flags)
flags.update({
})
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
aliases = aliases
flags = flags
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
|
<commit_before>from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
<commit_msg>Add some flags to nbgrader assign<commit_after>from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
aliases = {}
aliases.update(base_aliases)
aliases.update({
'header': 'IncludeHeaderFooter.header',
'footer': 'IncludeHeaderFooter.footer'
})
flags = {}
flags.update(base_flags)
flags.update({
})
class AssignApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-assign')
description = Unicode(u'Prepare a student version of an assignment by removing solutions')
aliases = aliases
flags = flags
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.ClearSolutions',
'IPython.nbconvert.preprocessors.ClearOutputPreprocessor'
]
self.config.merge(self.extra_config)
|
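The aliases/flags pattern above deliberately builds fresh dicts and update()s the imported base tables into them rather than mutating base_aliases in place; mutation would leak this app's options into every other app that shares the base module. A minimal demonstration (names are illustrative):

base_aliases = {'log-level': 'Application.log_level'}

# Extend a copy for this app only; the shared base table stays untouched.
aliases = {}
aliases.update(base_aliases)
aliases.update({
    'header': 'IncludeHeaderFooter.header',
    'footer': 'IncludeHeaderFooter.footer',
})

assert 'header' in aliases
assert 'header' not in base_aliases  # no cross-app leakage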
ea902f4002344c1cbf56dbd989c27aa1ad41a363
|
task_run_system.py
|
task_run_system.py
|
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
s_system = db_client.system_collection.find_one({'_id': system_id})
system = db_client.deserialize_entity(s_system)
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
image_source = db_client.deserialize_entity(s_image_source)
trial_result = trial_runner.run_system_with_source(system, image_source)
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import core.image_collection
import core.image_entity
import systems.deep_learning.keras_frcnn
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
system = None
s_system = db_client.system_collection.find_one({'_id': system_id})
if s_system is not None:
system = db_client.deserialize_entity(s_system)
del s_system
image_source = None
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
if s_image_source is not None:
image_source = db_client.deserialize_entity(s_image_source)
del s_image_source
if system is not None and image_source is not None:
trial_result = trial_runner.run_system_with_source(system, image_source)
if trial_result is not None:
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
Make the run system task more error friendly
|
Make the run system task more error friendly
|
Python
|
bsd-2-clause
|
jskinn/robot-vision-experiment-framework,jskinn/robot-vision-experiment-framework
|
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
s_system = db_client.system_collection.find_one({'_id': system_id})
system = db_client.deserialize_entity(s_system)
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
image_source = db_client.deserialize_entity(s_image_source)
trial_result = trial_runner.run_system_with_source(system, image_source)
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
Make the run system task more error friendly
|
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import core.image_collection
import core.image_entity
import systems.deep_learning.keras_frcnn
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
system = None
s_system = db_client.system_collection.find_one({'_id': system_id})
if s_system is not None:
system = db_client.deserialize_entity(s_system)
del s_system
image_source = None
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
if s_image_source is not None:
image_source = db_client.deserialize_entity(s_image_source)
del s_image_source
if system is not None and image_source is not None:
trial_result = trial_runner.run_system_with_source(system, image_source)
if trial_result is not None:
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
<commit_before>import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
s_system = db_client.system_collection.find_one({'_id': system_id})
system = db_client.deserialize_entity(s_system)
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
image_source = db_client.deserialize_entity(s_image_source)
trial_result = trial_runner.run_system_with_source(system, image_source)
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
<commit_msg>Make the run system task more error friendly<commit_after>
|
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import core.image_collection
import core.image_entity
import systems.deep_learning.keras_frcnn
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
system = None
s_system = db_client.system_collection.find_one({'_id': system_id})
if s_system is not None:
system = db_client.deserialize_entity(s_system)
del s_system
image_source = None
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
if s_image_source is not None:
image_source = db_client.deserialize_entity(s_image_source)
del s_image_source
if system is not None and image_source is not None:
trial_result = trial_runner.run_system_with_source(system, image_source)
if trial_result is not None:
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
s_system = db_client.system_collection.find_one({'_id': system_id})
system = db_client.deserialize_entity(s_system)
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
image_source = db_client.deserialize_entity(s_image_source)
trial_result = trial_runner.run_system_with_source(system, image_source)
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
Make the run system task more error friendly
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import core.image_collection
import core.image_entity
import systems.deep_learning.keras_frcnn
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
system = None
s_system = db_client.system_collection.find_one({'_id': system_id})
if s_system is not None:
system = db_client.deserialize_entity(s_system)
del s_system
image_source = None
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
if s_image_source is not None:
image_source = db_client.deserialize_entity(s_image_source)
del s_image_source
if system is not None and image_source is not None:
trial_result = trial_runner.run_system_with_source(system, image_source)
if trial_result is not None:
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
<commit_before>import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
s_system = db_client.system_collection.find_one({'_id': system_id})
system = db_client.deserialize_entity(s_system)
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
image_source = db_client.deserialize_entity(s_image_source)
trial_result = trial_runner.run_system_with_source(system, image_source)
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
<commit_msg>Make the run system task more error friendly<commit_after>import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import core.image_collection
import core.image_entity
import systems.deep_learning.keras_frcnn
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
system = None
s_system = db_client.system_collection.find_one({'_id': system_id})
if s_system is not None:
system = db_client.deserialize_entity(s_system)
del s_system
image_source = None
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
if s_image_source is not None:
image_source = db_client.deserialize_entity(s_image_source)
del s_image_source
if system is not None and image_source is not None:
trial_result = trial_runner.run_system_with_source(system, image_source)
if trial_result is not None:
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
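The rewrite above turns unconditional deserialization into guard clauses: each find_one() may return None, and the trial only runs once both lookups succeed, so a bad ObjectId no longer crashes the task. The control flow, stripped of the database (names and data are illustrative):

def find(collection, key):
    # Stand-in for find_one(): None signals a missing document.
    return collection.get(key)

systems = {'sys1': 'mock system'}
image_sources = {}  # 'img1' deliberately missing

system = find(systems, 'sys1')
image_source = find(image_sources, 'img1')

if system is not None and image_source is not None:
    print('running trial with', system, 'and', image_source)
else:
    # Missing records are skipped instead of raising during deserialization.
    print('skipping: system or image source not found')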
9e10bbb48f6d19ce7e2363dc843279a5ffb4957e
|
pywikibot/families/wikivoyage_family.py
|
pywikibot/families/wikivoyage_family.py
|
# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
|
# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'hi', 'ru',
]
|
Add hiwikivoyage to the list of global bot wikis
|
[IMPR] Add hiwikivoyage to the list of global bot wikis
https://meta.wikimedia.org/wiki/Bot_policy/Implementation
Change-Id: I25246991c177ce499bb63e4dfff46cb03d8e9f59
|
Python
|
mit
|
PersianWikipedia/pywikibot-core,wikimedia/pywikibot-core,wikimedia/pywikibot-core
|
# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
[IMPR] Add hiwikivoyage to the list of global bot wikis
https://meta.wikimedia.org/wiki/Bot_policy/Implementation
Change-Id: I25246991c177ce499bb63e4dfff46cb03d8e9f59
|
# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'hi', 'ru',
]
|
<commit_before># -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
<commit_msg>[IMPR] Add hiwikivoyage to the list of global bot wikis
https://meta.wikimedia.org/wiki/Bot_policy/Implementation
Change-Id: I25246991c177ce499bb63e4dfff46cb03d8e9f59<commit_after>
|
# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'hi', 'ru',
]
|
# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
[IMPR] Add hiwikivoyage to the list of global bot wikis
https://meta.wikimedia.org/wiki/Bot_policy/Implementation
Change-Id: I25246991c177ce499bb63e4dfff46cb03d8e9f59
# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'hi', 'ru',
]
|
<commit_before># -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
<commit_msg>[IMPR] Add hiwikivoyage to the list of global bot wikis
https://meta.wikimedia.org/wiki/Bot_policy/Implementation
Change-Id: I25246991c177ce499bb63e4dfff46cb03d8e9f59<commit_after># -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es', 'he',
'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'bn', 'ps', 'hi',
]
category_redirect_templates = {
'_default': (),
'bn': ('বিষয়শ্রেণী পুনর্নির্দেশ',),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'hi', 'ru',
]
|
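cross_allowed is a plain membership list, and the new 'hi' entry is inserted in alphabetical position rather than appended, which keeps future diffs readable. A tiny check of that convention plus the lookup the list implies (the helper is illustrative, not pywikibot's API):

cross_allowed = ['el', 'en', 'es', 'fa', 'hi', 'ru']

# Convention: keep the list alphabetically sorted so additions slot in place.
assert cross_allowed == sorted(cross_allowed)

def global_bots_allowed(lang_code):
    # Illustrative membership test only; pywikibot's real check lives elsewhere.
    return lang_code in cross_allowed

assert global_bots_allowed('hi')
assert not global_bots_allowed('de')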
2a494efd72d34ac638763d162559d43fe3705698
|
test/test_datac.py
|
test/test_datac.py
|
# -*- coding: utf-8 -*-
import datac
import numpy as np
import os
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
pwd = os.getcwd()
testdir = "test"
filename = "data.dat"
fqpn = os.path.join(pwd, testdir, filename)
datac.write_json(fqpn, abscissae)
|
# -*- coding: utf-8 -*-
import datac
import numpy as np
import os
import unittest
class dummyclass(object):
"""
Simple class for testing `generate_ordinates`
"""
def __init__(self, params):
pass
def fun(self):
"""
Return value of `True`
"""
return True
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
data = datac.generate_ordinates(abscissae, dummyclass, "fun")
|
Add dummy class to test generate_ordinates
|
Add dummy class to test generate_ordinates
|
Python
|
mit
|
jrsmith3/datac,jrsmith3/datac
|
# -*- coding: utf-8 -*-
import datac
import numpy as np
import os
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
pwd = os.getcwd()
testdir = "test"
filename = "data.dat"
fqpn = os.path.join(pwd, testdir, filename)
datac.write_json(fqpn, abscissae)
Add dummy class to test generate_ordinates
|
# -*- coding: utf-8 -*-
import datac
import numpy as np
import os
import unittest
class dummyclass(object):
"""
Simple class for testing `generate_ordinates`
"""
def __init__(self, params):
pass
def fun(self):
"""
Return value of `True`
"""
return True
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
data = datac.generate_ordinates(abscissae, dummyclass, "fun")
|
<commit_before># -*- coding: utf-8 -*-
import datac
import numpy as np
import os
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
pwd = os.getcwd()
testdir = "test"
filename = "data.dat"
fqpn = os.path.join(pwd, testdir, filename)
datac.write_json(fqpn, abscissae)
<commit_msg>Add dummy class to test generate_ordinates<commit_after>
|
# -*- coding: utf-8 -*-
import datac
import numpy as np
import os
import unittest
class dummyclass(object):
"""
Simple class for testing `generate_ordinates`
"""
def __init__(self, params):
pass
def fun(self):
"""
Return value of `True`
"""
return True
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
data = datac.generate_ordinates(abscissae, dummyclass, "fun")
|
# -*- coding: utf-8 -*-
import datac
import numpy as np
import os
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
pwd = os.getcwd()
testdir = "test"
filename = "data.dat"
fqpn = os.path.join(pwd, testdir, filename)
datac.write_json(fqpn, abscissae)
Add dummy class to test generate_ordinates
# -*- coding: utf-8 -*-
import datac
import numpy as np
import os
import unittest
class dummyclass(object):
"""
Simple class for testing `generate_ordinates`
"""
def __init__(self, params):
pass
def fun(self):
"""
Return value of `True`
"""
return True
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
data = datac.generate_ordinates(abscissae, dummyclass, "fun")
|
<commit_before># -*- coding: utf-8 -*-
import datac
import numpy as np
import os
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
pwd = os.getcwd()
testdir = "test"
filename = "data.dat"
fqpn = os.path.join(pwd, testdir, filename)
datac.write_json(fqpn, abscissae)
<commit_msg>Add dummy class to test generate_ordinates<commit_after># -*- coding: utf-8 -*-
import datac
import numpy as np
import os
import unittest
class dummyclass(object):
"""
Simple class for testing `generate_ordinates`
"""
def __init__(self, params):
pass
def fun(self):
"""
Return value of `True`
"""
return True
params = {"temp_sun": 6000.}
bandgaps = np.linspace(0, 3.25, 100)
abscissae = datac.generate_abscissae(bandgaps, "bandgap", params)
data = datac.generate_ordinates(abscissae, dummyclass, "fun")
|
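dummyclass is a test double: judging by the test, generate_ordinates instantiates the given class once per abscissa and calls the named method, so a no-op class whose method returns a constant isolates that plumbing from any real model. A sketch of the call shape being exercised (generate_ordinates here is inferred from the test, not datac's actual source):

def generate_ordinates(abscissae, cls, method_name):
    # Inferred call shape: construct cls(params) per abscissa, then invoke
    # the method named by method_name; datac's real code may differ.
    return [getattr(cls(params), method_name)() for params in abscissae]

class DummyClass(object):
    """No-op double: swallows params; fun() just returns True."""
    def __init__(self, params):
        pass
    def fun(self):
        return True

data = generate_ordinates([{'bandgap': 0.0}, {'bandgap': 1.1}], DummyClass, 'fun')
assert data == [True, True]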
ff735f207b44d1bf009fbcfa5df26da0da5aed4d
|
test/test_rooms.py
|
test/test_rooms.py
|
import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail("not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
|
import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail(reason="not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
|
Fix expected failure decorator in test.
|
Fix expected failure decorator in test.
|
Python
|
apache-2.0
|
hichris1234/ChatExchange,ByteCommander/ChatExchange6,Charcoal-SE/ChatExchange,Charcoal-SE/ChatExchange,ByteCommander/ChatExchange6,hichris1234/ChatExchange
|
import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail("not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
Fix expected failure decorator in test.
|
import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail(reason="not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
|
<commit_before>import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail("not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
<commit_msg>Fix expected failure decorator in test.<commit_after>
|
import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail(reason="not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
|
import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail("not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
Fix expected failure decorator in test.import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail(reason="not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
|
<commit_before>import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail("not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
<commit_msg>Fix expected failure decorator in test.<commit_after>import pytest
import chatexchange
from chatexchange.events import MessageEdited
import live_testing
if live_testing:
@pytest.mark.xfail(reason="not implemented yet")
def test_room_iterators():
client = chatexchange.Client(
'stackexchange.com', live_testing.email, live_testing.password)
me = client.get_me()
sandbox = client.get_room(11540)
my_message = None
with sandbox.messages() as messages:
sandbox.send_message("hello worl")
for message in messages:
if message.owner is me:
my_message = message
assert my_message.content == "hello worl"
break
with sandbox.events(MessageEdited) as edits:
my_message.edit("hello world")
for edit in edits:
assert isinstance(edit, MessageEdited)
if edit.message is my_message:
assert my_message.content == "hello world"
break
|
f07a114ed23109c9b834b2cbc37ba54c728d73cb
|
fmn/lib/__init__.py
|
fmn/lib/__init__.py
|
""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = []
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters.append(obj)
return filters
|
""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = {}
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters[name] = obj
return {root: filters}
|
Restructure the valid_paths list into a dict.
|
Restructure the valid_paths list into a dict.
|
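A self-contained sketch of the pattern this commit moves to, using the stdlib logging module as a stand-in for fmn.filters: discovered callables are keyed by name under their module root, so they can be looked up by path instead of scanned positionally.

def load_callables(root='logging'):
    """Mirror of the restructured loader: return {root: {name: callable}}."""
    module = __import__(root, fromlist=[root.split('.')[0]])
    found = {}
    for name in dir(module):
        obj = getattr(module, name)
        if callable(obj):
            found[name] = obj
    return {root: found}

paths = load_callables()
assert 'getLogger' in paths['logging']   # addressable by module root and name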
Python
|
lgpl-2.1
|
jeremycline/fmn,jeremycline/fmn,jeremycline/fmn
|
""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = []
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters.append(obj)
return filters
Restructure the valid_paths list into a dict.
|
""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = {}
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters[name] = obj
return {root: filters}
|
<commit_before>""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = []
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters.append(obj)
return filters
<commit_msg>Restructure the valid_paths list into a dict.<commit_after>
|
""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = {}
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters[name] = obj
return {root: filters}
|
""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = []
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters.append(obj)
return filters
Restructure the valid_paths list into a dict.""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = {}
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters[name] = obj
return {root: filters}
|
<commit_before>""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = []
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters.append(obj)
return filters
<commit_msg>Restructure the valid_paths list into a dict.<commit_after>""" fedmsg-notifications internal API """
import fmn.lib.models
import logging
log = logging.getLogger(__name__)
def recipients(session, config, message):
""" The main API function.
Accepts a fedmsg message as an argument.
Returns a dict mapping context names to lists of recipients.
"""
res = {}
for context in session.query(fmn.lib.models.Context).all():
res[context.name] = recipients_for_context(
session, config, context, message)
return res
def recipients_for_context(session, config, context, message):
""" Returns the recipients for a given fedmsg message and stated context.
Context may be either the name of a context or an instance of
fmn.lib.models.Context.
"""
if isinstance(context, basestring):
context = session.query(fmn.lib.models.Context)\
.filter_by(name=context).one()
return context.recipients(session, config, message)
def load_filters(root='fmn.filters'):
""" Load the big list of allowed callable filters. """
module = __import__(root, fromlist=[root.split('.')[0]])
filters = {}
for name in dir(module):
obj = getattr(module, name)
if not callable(obj):
continue
log.info("Found filter %r %r" % (name, obj))
filters[name] = obj
return {root: filters}
|
cf6f8ad122c8b113027585be1a8dd93affffc496
|
pigeon_flask.py
|
pigeon_flask.py
|
#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
response = make_response(p.lookup(k))
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = p.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
Return 404 NOT FOUND if no match could be found
|
Return 404 NOT FOUND if no match could be found
|
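A minimal, self-contained sketch of the pattern the commit adds: flask.abort(404) raises an HTTPException, so a missing lookup result short-circuits before make_response ever sees None. The in-memory dict is a hypothetical stand-in for PigeonStore.

from flask import Flask, abort, make_response

app = Flask(__name__)
FAKE_STORE = {'1.2.3.4': '{"country": "NL"}'}   # hypothetical stand-in

@app.route('/demo/<ip>')
def demo(ip):
    info_as_json = FAKE_STORE.get(ip)
    if info_as_json is None:
        abort(404)   # raises immediately; nothing below runs on a miss
    response = make_response(info_as_json)
    response.headers['Content-Type'] = 'application/json'
    return response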
Python
|
bsd-3-clause
|
wbolster/whip
|
#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
response = make_response(p.lookup(k))
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
Return 404 NOT FOUND if no match could be found
|
#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = p.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
<commit_before>#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
response = make_response(p.lookup(k))
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
<commit_msg>Return 404 NOT FOUND if no match could be found<commit_after>
|
#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = p.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
response = make_response(p.lookup(k))
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
Return 404 NOT FOUND if no match could be found#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = p.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
<commit_before>#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
response = make_response(p.lookup(k))
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
<commit_msg>Return 404 NOT FOUND if no match could be found<commit_after>#!/usr/bin/env python
from flask import Flask, abort, make_response, request
from pigeon import PigeonStore
from socket import inet_aton, error as socket_error
app = Flask(__name__)
@app.before_first_request
def open_store():
global p
p = PigeonStore()
@app.route('/lookup')
def lookup():
ip = request.args['ip']
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = p.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
f264a06db669df1017df60d932b301dac7208233
|
sqk/datasets/models.py
|
sqk/datasets/models.py
|
from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
|
from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
is_label_name = models.BooleanField(default=False)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
|
Add is_label_name field to Feature model
|
Add is_label_name field to Feature model
|
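A brief usage sketch of the new flag, assuming a configured Django project and an applied schema migration (neither shown here); with default=False, existing rows stay unflagged until a feature is explicitly marked.

label_features = Feature.objects.filter(is_label_name=True)

feature = Feature.objects.get(name='duration')   # 'duration' is illustrative
feature.is_label_name = True
feature.save()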
Python
|
bsd-3-clause
|
sloria/sepal,sloria/sepal,sloria/sepal,sloria/sepal,sloria/sepal
|
from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
Add is_label_name field to Feature model
|
from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
is_label_name = models.BooleanField(default=False)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
|
<commit_before>from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
<commit_msg>Add is_label_name field to Feature model<commit_after>
|
from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
is_label_name = models.BooleanField(default=False)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
|
from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
Add is_label_name field to Feature modelfrom django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
is_label_name = models.BooleanField(default=False)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
|
<commit_before>from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
<commit_msg>Add is_label_name field to Feature model<commit_after>from django.db import models
from django.utils import timezone
class Dataset(models.Model):
name = models.CharField(max_length=100, default='')
description = models.CharField(max_length=500, default='')
source = models.FileField(upload_to='data_sources')
created_at = models.DateTimeField('created at', default=timezone.now())
def __unicode__(self):
return self.name
class Label(models.Model):
label = models.CharField(max_length=100, default='unlabeled')
def __unicode__(self):
return self.label
class Instance(models.Model):
dataset = models.ForeignKey(Dataset, related_name='instances')
label = models.ForeignKey(Label, default=0, related_name='instances')
name = models.CharField(max_length=100,
default='unnamed')
def __unicode__(self):
return self.name
class Feature(models.Model):
datasets = models.ManyToManyField(Dataset,
related_name='features')
instances = models.ManyToManyField(
Instance,
null=True,
related_name='features')
name = models.CharField(max_length=100, unique=True)
is_label_name = models.BooleanField(default=False)
def __unicode__(self):
return self.name
class Value(models.Model):
feature = models.ForeignKey(Feature, related_name='values')
instance = models.ForeignKey(Instance, related_name='values')
value = models.FloatField()
def __unicode__(self):
return unicode(self.value)
|
26833c5d41bb3611aa61655c28da4d40b173712e
|
Orange/tests/test_preprocess.py
|
Orange/tests/test_preprocess.py
|
import unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock()
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
return MockPreprocessor
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, {'a': 3})
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor = create_mock()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
|
import unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock(return_value=None)
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
|
Fix tests for Preprocess constructors
|
Fix tests for Preprocess constructors
|
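A self-contained sketch of the core fix: Python requires __init__ to return None, but a bare Mock() returns a new Mock when called, so instantiating the class raises TypeError. Passing return_value=None is the standard workaround; unittest.mock behaves the same as the external mock package used above.

from unittest import mock

class Broken:
    __init__ = mock.Mock()                    # returns a Mock when called

class Fixed:
    __init__ = mock.Mock(return_value=None)   # returns None, as Python requires

try:
    Broken()
except TypeError as exc:
    print(exc)    # __init__() should return None, not 'Mock'

Fixed()           # instantiates cleanly
assert Fixed.__init__.called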
Python
|
bsd-2-clause
|
qusp/orange3,qusp/orange3,qPCR4vir/orange3,qusp/orange3,qPCR4vir/orange3,kwikadi/orange3,qPCR4vir/orange3,qPCR4vir/orange3,marinkaz/orange3,marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,kwikadi/orange3,marinkaz/orange3,qusp/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3,cheral/orange3,cheral/orange3,kwikadi/orange3,marinkaz/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3
|
import unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock()
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
return MockPreprocessor
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, {'a': 3})
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor = create_mock()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
Fix tests for Preprocess constructors
|
import unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock(return_value=None)
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
|
<commit_before>import unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock()
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
return MockPreprocessor
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, {'a': 3})
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor = create_mock()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
<commit_msg>Fix tests for Preprocess constructors<commit_after>
|
import unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock(return_value=None)
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
|
import unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock()
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
return MockPreprocessor
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, {'a': 3})
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor = create_mock()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
Fix tests for Preprocess constructorsimport unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock(return_value=None)
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
|
<commit_before>import unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock()
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
return MockPreprocessor
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, {'a': 3})
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor = create_mock()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
MockPreprocessor = create_mock()
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count(), 0)
<commit_msg>Fix tests for Preprocess constructors<commit_after>import unittest
from mock import Mock, MagicMock, patch
import Orange
class TestPreprocess(unittest.TestCase):
def test_read_data_calls_reader(self):
class MockPreprocessor(Orange.preprocess.preprocess.Preprocess):
__init__ = Mock(return_value=None)
__call__ = Mock()
@classmethod
def reset(cls):
cls.__init__.reset_mock()
cls.__call__.reset_mock()
table = Mock(Orange.data.Table)
MockPreprocessor(table, 1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
MockPreprocessor.__call__.assert_called_with(table)
MockPreprocessor.reset()
MockPreprocessor(1, 2, a=3)
MockPreprocessor.__init__.assert_called_with(1, 2, a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor(a=3)
MockPreprocessor.__init__.assert_called_with(a=3)
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
MockPreprocessor()
MockPreprocessor.__init__.assert_called_with()
self.assertEqual(MockPreprocessor.__call__.call_count, 0)
|
23a8943d2e3688753371b08c490aaae2052eb356
|
ckanext/mapactionevent/logic/action/create.py
|
ckanext/mapactionevent/logic/action/create.py
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
if len(existing_events) == 0:
event_code = 1
else:
event_code = int(existing_events[0]) + 1
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
event_code = 1  # default value if there are no existing numerically named events
for event in existing_events:
if event.isdigit():
event_code = int(event) + 1
break
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
Make auto-incrementing event names work with a mixture of numeric and non-numeric event names
|
Make auto-incrementing event names work with a mixture of numeric and non-numeric event names
|
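A self-contained sketch of the fixed numbering logic: because the group list is sorted by name descending, alphabetic names sort above digit-only ones, so the loop skips them and bumps the first purely numeric name it finds. The sample names are illustrative.

def next_event_code(existing_events):
    event_code = 1   # default when no numerically named events exist
    for event in existing_events:
        if event.isdigit():
            event_code = int(event) + 1
            break
    return str(event_code).zfill(5)

assert next_event_code([]) == '00001'
assert next_event_code(['storm-response', '00042', '00007']) == '00043'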
Python
|
agpl-3.0
|
aptivate/ckanext-mapactionevent,aptivate/ckanext-mapactionevent,aptivate/ckanext-mapactionevent
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
if len(existing_events) == 0:
event_code = 1
else:
event_code = int(existing_events[0]) + 1
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
Make auto-incrementing event names work with a mixture of numeric and non-numeric event names
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
event_code = 1  # default value if there are no existing numerically named events
for event in existing_events:
if event.isdigit():
event_code = int(event) + 1
break
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
<commit_before>import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
if len(existing_events) == 0:
event_code = 1
else:
event_code = int(existing_events[0]) + 1
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
<commit_msg>Make auto-incrementing event names work with a mixture of numeric and non-numeric event names<commit_after>
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
event_code = 1  # default value if there are no existing numerically named events
for event in existing_events:
if event.isdigit():
event_code = int(event) + 1
break
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
if len(existing_events) == 0:
event_code = 1
else:
event_code = int(existing_events[0]) + 1
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
Make auto-incrementing event names work with a mixture of numeric and non-numeric event namesimport ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
event_code = 1  # default value if there are no existing numerically named events
for event in existing_events:
if event.isdigit():
event_code = int(event) + 1
break
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
<commit_before>import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
if len(existing_events) == 0:
event_code = 1
else:
event_code = int(existing_events[0]) + 1
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
<commit_msg>Make auto-incrementing event names work with a mixture of numeric and non-numeric event names<commit_after>import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
def event_create(context, data_dict):
""" Creates a 'event' type group with a custom unique identifier for the
event """
if data_dict.get('name'):
name = data_dict.get('name')
else:
# Generate a new operation ID
existing_events = toolkit.get_action('group_list')(
context,
{'type': 'event', 'sort': 'name desc'})
        event_code = 1  # default value if there are no existing numerically named events
for event in existing_events:
if event.isdigit():
event_code = int(event) + 1
break
name = str(event_code).zfill(5)
data_dict.update({
'name': name,
'type':'event'
})
try:
foo = toolkit.get_action('group_create')(
context,
data_dict=data_dict)
except (logic.NotFound) as e:
raise toolkit.ValidationError("foo %s" % e)
return foo
|
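A minimal standalone sketch of the naming scheme in the record above, assuming only that CKAN's group_list returns names sorted descending; the helper name and sample data are hypothetical, not from a real CKAN instance:

def next_event_name(existing_events):
    """Return the next zero-padded event name, skipping non-numeric names."""
    event_code = 1  # default when no numerically named events exist
    for event in existing_events:
        if event.isdigit():
            event_code = int(event) + 1
            break  # list is sorted descending, so the first numeric hit is the highest
    return str(event_code).zfill(5)

# Mixed numeric and non-numeric names no longer break the increment:
assert next_event_name([]) == '00001'
assert next_event_name(['annual-fair', '00007', '00003']) == '00008'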
f306f78304d657a163b2a03284c83afc09271e2b
|
populous/cli.py
|
populous/cli.py
|
import click
@click.group()
@click.version_option()
def cli():
pass
|
import click
from .loader import load_yaml
from .blueprint import Blueprint
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = Blueprint.from_description(load_yaml(*files))
for name, item in blueprint.items.items():
click.echo("{name}: {count} {by}".format(
name=name, count=item.count.number,
by="by {}".format(item.count.by) if item.count.by else ""
))
|
Add a naive implementation of the predict command
|
Add a naive implementation of the predict command
|
Python
|
mit
|
novafloss/populous
|
import click
@click.group()
@click.version_option()
def cli():
pass
Add a naive implementation of the predict command
|
import click
from .loader import load_yaml
from .blueprint import Blueprint
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = Blueprint.from_description(load_yaml(*files))
for name, item in blueprint.items.items():
click.echo("{name}: {count} {by}".format(
name=name, count=item.count.number,
by="by {}".format(item.count.by) if item.count.by else ""
))
|
<commit_before>import click
@click.group()
@click.version_option()
def cli():
pass
<commit_msg>Add a naive implementation of the predict command<commit_after>
|
import click
from .loader import load_yaml
from .blueprint import Blueprint
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = Blueprint.from_description(load_yaml(*files))
for name, item in blueprint.items.items():
click.echo("{name}: {count} {by}".format(
name=name, count=item.count.number,
by="by {}".format(item.count.by) if item.count.by else ""
))
|
import click
@click.group()
@click.version_option()
def cli():
pass
Add a naive implementation of the predict command
import click
from .loader import load_yaml
from .blueprint import Blueprint
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = Blueprint.from_description(load_yaml(*files))
for name, item in blueprint.items.items():
click.echo("{name}: {count} {by}".format(
name=name, count=item.count.number,
by="by {}".format(item.count.by) if item.count.by else ""
))
|
<commit_before>import click
@click.group()
@click.version_option()
def cli():
pass
<commit_msg>Add a naive implementation of the predict command<commit_after>import click
from .loader import load_yaml
from .blueprint import Blueprint
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = Blueprint.from_description(load_yaml(*files))
for name, item in blueprint.items.items():
click.echo("{name}: {count} {by}".format(
name=name, count=item.count.number,
by="by {}".format(item.count.by) if item.count.by else ""
))
|
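The record above follows the standard click group/subcommand pattern. Here is a self-contained sketch of that shape with the blueprint logic stubbed out; the command body and output are illustrative only:

import click

@click.group()
def cli():
    pass

@cli.command()
@click.argument('files', nargs=-1)  # zero or more file paths
def predict(files):
    """Stub: echo one line per input instead of counting objects."""
    for name in files:
        click.echo("{name}: 0 objects (stub)".format(name=name))

if __name__ == '__main__':
    cli()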
0ac8062ea2c16edcc5d81c14976413a3ddde43b6
|
clowder_test/clowder_test/clowder_test_app.py
|
clowder_test/clowder_test/clowder_test_app.py
|
# -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
|
# -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
def post_argument_parsing_hook(app):
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
hooks = [
('post_argument_parsing', post_argument_parsing_hook)
]
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
|
Move clowder_test directory setup to post arg parse hook
|
Move clowder_test directory setup to post arg parse hook
|
Python
|
mit
|
JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder
|
# -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
Move clowder_test directory setup to post arg parse hook
|
# -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
def post_argument_parsing_hook(app):
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
hooks = [
('post_argument_parsing', post_argument_parsing_hook)
]
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
|
<commit_before># -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
<commit_msg>Move clowder_test directory setup to post arg parse hook<commit_after>
|
# -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
def post_argument_parsing_hook(app):
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
hooks = [
('post_argument_parsing', post_argument_parsing_hook)
]
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
|
# -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
Move clowder_test directory setup to post arg parse hook
# -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
def post_argument_parsing_hook(app):
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
hooks = [
('post_argument_parsing', post_argument_parsing_hook)
]
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
|
<commit_before># -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
<commit_msg>Move clowder_test directory setup to post arg parse hook<commit_after># -*- coding: utf-8 -*-
"""Clowder test command line app
.. codeauthor:: Joe Decapo <joe@polka.cat>
"""
from __future__ import print_function
import os
import colorama
from cement.core.foundation import CementApp
from clowder_test import ROOT_DIR
from clowder_test.cli.base_controller import BaseController
from clowder_test.cli.cats_controller import CatsController
from clowder_test.cli.cocos2d_controller import Cocos2dController
from clowder_test.cli.llvm_controller import LLVMController
from clowder_test.cli.swift_controller import SwiftController
from clowder_test.execute import execute_command
def post_argument_parsing_hook(app):
execute_command('./setup_local_test_directory.sh', os.path.join(ROOT_DIR, 'test', 'scripts'))
class ClowderApp(CementApp):
"""Clowder command CLI app"""
class Meta:
"""Clowder command CLI Meta configuration"""
label = 'clowder'
extensions = ['argcomplete']
base_controller = 'base'
hooks = [
('post_argument_parsing', post_argument_parsing_hook)
]
handlers = [
BaseController,
CatsController,
Cocos2dController,
LLVMController,
SwiftController
]
def main():
"""Clowder command CLI main function"""
print()
with ClowderApp() as app:
app.run()
if __name__ == '__main__':
colorama.init()
main()
|
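For reference, a stripped-down sketch of the cement 2.x hook mechanism the record above switches to: Meta.hooks takes (hook_label, callable) pairs, and post_argument_parsing fires once the CLI arguments are parsed. The app label and log message below are placeholders:

from cement.core.foundation import CementApp

def post_argument_parsing_hook(app):
    # runs after arguments are parsed, before the controller dispatches
    app.log.info('arguments parsed; running one-time setup here')

class MyApp(CementApp):
    class Meta:
        label = 'myapp'
        hooks = [
            ('post_argument_parsing', post_argument_parsing_hook),
        ]

with MyApp() as app:
    app.run()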
550c0445e5763354df6fbcac611dc5469014e13c
|
moocng/teacheradmin/forms.py
|
moocng/teacheradmin/forms.py
|
# Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
|
# Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers', 'owner',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
|
Fix problem related to the teacheradmin form, exclude owner
|
Fix problem related to the teacheradmin form, exclude owner
|
Python
|
apache-2.0
|
GeographicaGS/moocng,OpenMOOC/moocng,OpenMOOC/moocng,GeographicaGS/moocng,GeographicaGS/moocng,GeographicaGS/moocng
|
# Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
Fix problem related to the teacheradmin form, exclude owner
|
# Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers', 'owner',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
|
<commit_before># Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
<commit_msg>Fix problem related to the teacheradmin form, exclude owner<commit_after>
|
# Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers', 'owner',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
|
# Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
Fix problem related to the teacheradmin form, exclude owner
# Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers', 'owner',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
|
<commit_before># Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
<commit_msg>Fix problem related to the teacheradmin form, exclude owner<commit_after># Copyright 2012 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.forms import ModelForm
from django.forms.util import ErrorDict
from moocng.courses.models import Course
class CourseForm(ModelForm):
class Meta:
model = Course
exclude = ('slug', 'teachers', 'owner',)
def get_pretty_errors(self):
errors = ErrorDict()
for k, v in self.errors.items():
name = self.fields[k].label
errors[name] = v
return errors
|
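The get_pretty_errors helper above simply re-keys Django's error dict by each field's human-readable label. A framework-free sketch of that idea; field names, labels, and messages are made up:

class FakeField(object):
    def __init__(self, label):
        self.label = label

fields = {'email': FakeField('E-mail address')}
errors = {'email': ['Enter a valid e-mail address.']}

# same re-keying as get_pretty_errors, without Django's ErrorDict
pretty = dict((fields[k].label, v) for k, v in errors.items())
assert pretty == {'E-mail address': ['Enter a valid e-mail address.']}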
8740092c5c6d1fababdd44a7e9e4dbc180403e09
|
mrp_auto_prod/__openerp__.py
|
mrp_auto_prod/__openerp__.py
|
# -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description':
"""
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo_xml': [
'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description': """
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo': [
# TODO to fix data to pass test
# 'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
FIX disable mrp auto prod demo data
|
FIX disable mrp auto prod demo data
|
Python
|
agpl-3.0
|
dvitme/odoo-addons,ClearCorp/account-financial-tools,maljac/odoo-addons,adhoc-dev/odoo-addons,ingadhoc/product,bmya/odoo-addons,jorsea/odoo-addons,ingadhoc/odoo-addons,syci/ingadhoc-odoo-addons,sysadminmatmoz/ingadhoc,ingadhoc/account-financial-tools,bmya/odoo-addons,sysadminmatmoz/ingadhoc,ingadhoc/account-payment,adhoc-dev/odoo-addons,ingadhoc/account-invoicing,ingadhoc/account-analytic,adhoc-dev/odoo-addons,sysadminmatmoz/ingadhoc,ingadhoc/partner,maljac/odoo-addons,ingadhoc/stock,ClearCorp/account-financial-tools,HBEE/odoo-addons,jorsea/odoo-addons,ingadhoc/odoo-addons,maljac/odoo-addons,syci/ingadhoc-odoo-addons,adhoc-dev/account-financial-tools,ingadhoc/sale,ingadhoc/sale,bmya/odoo-addons,HBEE/odoo-addons,syci/ingadhoc-odoo-addons,adhoc-dev/account-financial-tools,dvitme/odoo-addons,jorsea/odoo-addons,ingadhoc/product,HBEE/odoo-addons,ingadhoc/sale,dvitme/odoo-addons,ingadhoc/odoo-addons,ingadhoc/sale
|
# -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description':
"""
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo_xml': [
'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
FIX disable mrp auto prod demo data
|
# -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description': """
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo': [
# TODO to fix data to pass test
# 'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description':
"""
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo_xml': [
'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>FIX disable mrp auto prod demo data<commit_after>
|
# -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description': """
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo': [
# TODO to fix data to pass test
# 'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description':
"""
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo_xml': [
'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
FIX disable mrp auto prod demo data
# -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description': """
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo': [
# TODO to fix data to pass test
# 'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description':
"""
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo_xml': [
'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>FIX disable mrp auto prod demo data<commit_after># -*- coding: utf-8 -*-
{
'name': 'MRP auto production',
'version': '0.1',
'author': 'ADHOC',
'category': 'Localization/Argentina',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'description': """
Para probar instalar tambien "sale" y "procurement_jit_stock"
""",
'depends': [
'mrp',
'procurement_jit_stock',
],
'demo': [
# TODO to fix data to pass test
# 'mrp_demo.xml',
],
'test': [],
'data': [
'mrp_view.xml',
],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
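The manifest change above is twofold: the deprecated demo_xml key becomes demo, and the failing file is commented out rather than deleted, leaving a TODO. A minimal illustrative manifest showing the resulting shape; module and file names are invented:

manifest = {
    'name': 'Example module',
    'depends': ['mrp'],
    'demo': [
        # TODO re-enable once the demo data passes the test suite
        # 'example_demo.xml',
    ],
    'data': ['example_view.xml'],
    'installable': True,
}
assert manifest['demo'] == []  # nothing is loaded while the TODO stands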
d2692296788e49ecf98f122d907f8ea2ba632975
|
docs/conf.py
|
docs/conf.py
|
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
Add toctree_only to latex_documents to fix sphinx build
|
Add toctree_only to latex_documents to fix sphinx build
|
Python
|
bsd-3-clause
|
lamby/django-slack
|
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
Add toctree_only to latex_documents to fix sphinx build
|
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
<commit_before>import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
<commit_msg>Add toctree_only to latex_documents to fix sphinx build<commit_after>
|
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
Add toctree_only to latex_documents to fix sphinx build
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
<commit_before>import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
<commit_msg>Add toctree_only to latex_documents to fix sphinx build<commit_after>import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_static_path = []
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
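For context on the tuple being extended above: per the Sphinx documentation, a latex_documents entry has five positional elements plus an optional sixth, toctree_only, which makes the LaTeX builder start output at the first toctree instead of the master page body. The values below are placeholders:

latex_documents = [
    ('index',         # startdocname: master document
     'project.tex',   # targetname: output file
     'Project docs',  # title
     'Author Name',   # author
     'manual',        # documentclass: 'manual' or 'howto'
     True),           # toctree_only
]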
bc97d63893858ba8cbcd44f83f4123fdd826ac71
|
addons/bestja_api_user/models.py
|
addons/bestja_api_user/models.py
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='bestja_api_user.api_access') # give access to the email field
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='base.group_system,bestja_api_user.api_access') # give access to the email field
|
Fix for Partner's email not being accessible to administrator
|
Fix for Partner's email not being accessible to administrator
|
Python
|
agpl-3.0
|
EE/bestja,EE/bestja,ludwiktrammer/bestja,ludwiktrammer/bestja,ludwiktrammer/bestja,EE/bestja
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='bestja_api_user.api_access') # give access to the email field
Fix for Partner's email not being accessible to administrator
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='base.group_system,bestja_api_user.api_access') # give access to the email field
|
<commit_before># -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='bestja_api_user.api_access') # give access to the email field
<commit_msg>Fix for Partner's email not being accessible to administrator<commit_after>
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='base.group_system,bestja_api_user.api_access') # give access to the email field
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='bestja_api_user.api_access') # give access to the email field
Fix for Partner's email not being accessible to administrator
# -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='base.group_system,bestja_api_user.api_access') # give access to the email field
|
<commit_before># -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='bestja_api_user.api_access') # give access to the email field
<commit_msg>Fix for Partner's email not being accessible to administrator<commit_after># -*- coding: utf-8 -*-
from openerp import models, fields, api
class User(models.Model):
_inherit = 'res.users'
def __init__(self, pool, cr):
super(User, self).__init__(pool, cr)
self._add_permitted_fields(level='privileged', fields={'email'})
self._add_permitted_fields(level='owner', fields={'email'})
@api.one
def _compute_user_access_level(self):
"""
Access level that the current (logged in) user has for the object.
Either "owner", "admin", "privileged" or None.
"""
super(User, self)._compute_user_access_level()
if not self.user_access_level and self.user_has_groups('bestja_api_user.api_access'):
self.user_access_level = 'privileged'
class Partner(models.Model):
_inherit = 'res.partner'
email = fields.Char(groups='base.group_system,bestja_api_user.api_access') # give access to the email field
|
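Why adding base.group_system fixes the record above: a field's groups attribute is a comma-separated list of group XML ids, and access is granted when the user belongs to any one of them, so listing the administrator group restores its access. A pure-Python stand-in for that check — the membership test is hypothetical, not Odoo's actual code:

def field_visible(groups_attr, user_groups):
    allowed = [g.strip() for g in groups_attr.split(',')]
    return any(g in user_groups for g in allowed)

groups_attr = 'base.group_system,bestja_api_user.api_access'
assert field_visible(groups_attr, {'base.group_system'})           # admin again
assert field_visible(groups_attr, {'bestja_api_user.api_access'})  # API group
assert not field_visible(groups_attr, {'base.group_user'})         # plain user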
2f168fa2886a4e3c00f15b5407bb860f0f9b38f4
|
main.py
|
main.py
|
"""
Classify images using random decision forests
"""
import sys
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image>' % sys.argv[0])
sys.exit(1)
clf = train_model()
classify_image(sys.argv[1], clf)
if __name__ == '__main__':
main()
|
"""
Classify images using random decision forests
"""
import sys, os
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs), clf.predict_proba(fvecs))
def compute_score(root_dir, clf):
""" Test model with all images in given directory
"""
fvecs = []
true_classes = []
for fn in os.listdir(root_dir):
assert len(fn.split('_')) > 1, 'Invalid filename'
fname = os.path.join(root_dir, fn)
fvecs.append(get_feature_vector(fname))
true_classes.append(fn.split('_')[0])
print('Accuracy:', clf.score(fvecs, true_classes))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image|directory>' % sys.argv[0])
sys.exit(1)
clf = train_model()
arg = sys.argv[1]
if os.path.isfile(arg):
classify_image(arg, clf)
elif os.path.isdir(arg):
compute_score(arg, clf)
else:
print('Invalid argument...')
if __name__ == '__main__':
main()
|
Add ability to test model on all images in some directory
|
Add ability to test model on all images in some directory
|
Python
|
mit
|
kpj/PyClass
|
"""
Classify images using random decision forests
"""
import sys
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image>' % sys.argv[0])
sys.exit(1)
clf = train_model()
classify_image(sys.argv[1], clf)
if __name__ == '__main__':
main()
Add ability to test model on all images in some directory
|
"""
Classify images using random decision forests
"""
import sys, os
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs), clf.predict_proba(fvecs))
def compute_score(root_dir, clf):
""" Test model with all images in given directory
"""
fvecs = []
true_classes = []
for fn in os.listdir(root_dir):
assert len(fn.split('_')) > 1, 'Invalid filename'
fname = os.path.join(root_dir, fn)
fvecs.append(get_feature_vector(fname))
true_classes.append(fn.split('_')[0])
print('Accuracy:', clf.score(fvecs, true_classes))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image|directory>' % sys.argv[0])
sys.exit(1)
clf = train_model()
arg = sys.argv[1]
if os.path.isfile(arg):
classify_image(arg, clf)
elif os.path.isdir(arg):
compute_score(arg, clf)
else:
print('Invalid argument...')
if __name__ == '__main__':
main()
|
<commit_before>"""
Classify images using random decision forests
"""
import sys
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image>' % sys.argv[0])
sys.exit(1)
clf = train_model()
classify_image(sys.argv[1], clf)
if __name__ == '__main__':
main()
<commit_msg>Add ability to test model on all images in some directory<commit_after>
|
"""
Classify images using random decision forests
"""
import sys, os
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs), clf.predict_proba(fvecs))
def compute_score(root_dir, clf):
""" Test model with all images in given directory
"""
fvecs = []
true_classes = []
for fn in os.listdir(root_dir):
assert len(fn.split('_')) > 1, 'Invalid filename'
fname = os.path.join(root_dir, fn)
fvecs.append(get_feature_vector(fname))
true_classes.append(fn.split('_')[0])
print('Accuracy:', clf.score(fvecs, true_classes))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image|directory>' % sys.argv[0])
sys.exit(1)
clf = train_model()
arg = sys.argv[1]
if os.path.isfile(arg):
classify_image(arg, clf)
elif os.path.isdir(arg):
compute_score(arg, clf)
else:
print('Invalid argument...')
if __name__ == '__main__':
main()
|
"""
Classify images using random decision forests
"""
import sys
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image>' % sys.argv[0])
sys.exit(1)
clf = train_model()
classify_image(sys.argv[1], clf)
if __name__ == '__main__':
main()
Add ability to test model on all images in some directory
"""
Classify images using random decision forests
"""
import sys, os
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs), clf.predict_proba(fvecs))
def compute_score(root_dir, clf):
""" Test model with all images in given directory
"""
fvecs = []
true_classes = []
for fn in os.listdir(root_dir):
assert len(fn.split('_')) > 1, 'Invalid filename'
fname = os.path.join(root_dir, fn)
fvecs.append(get_feature_vector(fname))
true_classes.append(fn.split('_')[0])
print('Accuracy:', clf.score(fvecs, true_classes))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image|directory>' % sys.argv[0])
sys.exit(1)
clf = train_model()
arg = sys.argv[1]
if os.path.isfile(arg):
classify_image(arg, clf)
elif os.path.isdir(arg):
compute_score(arg, clf)
else:
print('Invalid argument...')
if __name__ == '__main__':
main()
|
<commit_before>"""
Classify images using random decision forests
"""
import sys
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image>' % sys.argv[0])
sys.exit(1)
clf = train_model()
classify_image(sys.argv[1], clf)
if __name__ == '__main__':
main()
<commit_msg>Add ability to test model on all images in some directory<commit_after>"""
Classify images using random decision forests
"""
import sys, os
from mlearner import train_model
from utils import get_feature_vector
def classify_image(image_path, clf):
""" Classify given image
"""
fvecs = [get_feature_vector(image_path)]
print(clf.predict(fvecs), clf.predict_proba(fvecs))
def compute_score(root_dir, clf):
""" Test model with all images in given directory
"""
fvecs = []
true_classes = []
for fn in os.listdir(root_dir):
assert len(fn.split('_')) > 1, 'Invalid filename'
fname = os.path.join(root_dir, fn)
fvecs.append(get_feature_vector(fname))
true_classes.append(fn.split('_')[0])
print('Accuracy:', clf.score(fvecs, true_classes))
def main():
if len(sys.argv) != 2:
print('Usage: %s <image|directory>' % sys.argv[0])
sys.exit(1)
clf = train_model()
arg = sys.argv[1]
if os.path.isfile(arg):
classify_image(arg, clf)
elif os.path.isdir(arg):
compute_score(arg, clf)
else:
print('Invalid argument...')
if __name__ == '__main__':
main()
|
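The compute_score convention above encodes the true class as the filename prefix before the first underscore. A self-contained sketch of just that labelling step, with feature extraction and the classifier left out since utils and mlearner are not shown; the directory contents are fabricated:

import os
import tempfile

def labels_from_dir(root_dir):
    """Class label = filename part before the first underscore."""
    labels = []
    for fn in sorted(os.listdir(root_dir)):
        assert len(fn.split('_')) > 1, 'Invalid filename'
        labels.append(fn.split('_')[0])
    return labels

tmp = tempfile.mkdtemp()
for name in ('cat_001.png', 'cat_002.png', 'dog_001.png'):
    open(os.path.join(tmp, name), 'w').close()
print(labels_from_dir(tmp))  # ['cat', 'cat', 'dog']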
06a71d22df5b6f1196cbdff737ab071ba92fad0b
|
spacy/tests/regression/test_issue834.py
|
spacy/tests/regression/test_issue834.py
|
# coding: utf-8
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
Fix test failure by using unicode literals
|
Fix test failure by using unicode literals
|
Python
|
mit
|
explosion/spaCy,banglakit/spaCy,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,recognai/spaCy,Gregory-Howard/spaCy,honnibal/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,raphael0202/spaCy,raphael0202/spaCy,recognai/spaCy,honnibal/spaCy,raphael0202/spaCy,explosion/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,explosion/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,banglakit/spaCy,recognai/spaCy,spacy-io/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,explosion/spaCy,banglakit/spaCy,banglakit/spaCy
|
# coding: utf-8
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
Fix test failure by using unicode literals
|
# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
<commit_before># coding: utf-8
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
<commit_msg>Fix test failure by using unicode literals<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
# coding: utf-8
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
Fix test failure by using unicode literals# coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
<commit_before># coding: utf-8
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
<commit_msg>Fix test failure by using unicode literals<commit_after># coding: utf-8
from __future__ import unicode_literals
from io import StringIO
word2vec_str = """, -0.046107 -0.035951 -0.560418
de -0.648927 -0.400976 -0.527124
. 0.113685 0.439990 -0.634510
-1.499184 -0.184280 -0.598371"""
def test_issue834(en_vocab):
f = StringIO(word2vec_str)
vector_length = en_vocab.load_vectors(f)
assert vector_length == 3
|
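The fix in the record above works because io.StringIO only accepts unicode text; on Python 2, a bare string literal without unicode_literals is a byte string, so constructing the StringIO fails. A minimal sketch of the failure mode (illustrative only, not part of the record; the b"..." literal stands in for what a plain literal is on Python 2):

from io import StringIO

try:
    StringIO(b", -0.046107 -0.035951 -0.560418")  # byte string: rejected
except TypeError as exc:
    print(exc)  # initial_value must be str (unicode on Python 2), not bytes

StringIO(u", -0.046107 -0.035951 -0.560418")  # unicode text works everywhere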
b674f76c93b5208ad302fcba2d43b8c30bbaf14c
|
main.py
|
main.py
|
from altitude import run
run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
"C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
|
from altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
Put the directories back as they were for the server
|
Put the directories back as they were for the server
|
Python
|
mit
|
StamKaly/altitude-mod,StamKaly/altitude-mod
|
from altitude import run
run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
"C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
Put the directories back as they were for the server
|
from altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
<commit_before>from altitude import run
run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
"C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
<commit_msg>Put the directories back as they were for the server<commit_after>
|
from altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
from altitude import run
run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
"C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
Put the directories back as they were for the serverfrom altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
<commit_before>from altitude import run
run.run("27279", "C://Program Files (x86)/Altitude/servers/command.txt", "C://Program Files (x86)/Altitude/servers/log.txt",
"C://Program Files (x86)/Altitude/servers/log_old.txt", "C://Program Files (x86)/Altitude/servers/logs_archive.txt")
<commit_msg>Put the directories back as they were for the server<commit_after>from altitude import run
run.run("27279", "/home/user/altitude-files/servers/command.txt", "/home/user/altitude-files/servers/log.txt",
"/home/user/altitude-files/servers/log_old.txt", "/home/user/altitude-files/servers/logs_archive.txt")
|
ca6b04768a76299187d2c4efaeb612017812bcc7
|
bluebottle/redirects/views.py
|
bluebottle/redirects/views.py
|
from rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 100
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
|
from rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 200
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
|
Fix redirect page size for vjb
|
Fix redirect page size for vjb
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
from rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 100
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
Fix redirect page size for vjb
|
from rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 200
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
|
<commit_before>from rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 100
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
<commit_msg>Fix redirect page size for vjb<commit_after>
|
from rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 200
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
|
from rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 100
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
Fix redirect page size for vjbfrom rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 200
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
|
<commit_before>from rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 100
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
<commit_msg>Fix redirect page size for vjb<commit_after>from rest_framework import generics
from bluebottle.bluebottle_drf2.pagination import BluebottlePagination
from .models import Redirect
from .serializers import RedirectSerializer
class RedirectPagination(BluebottlePagination):
page_size = 200
class RedirectListView(generics.ListAPIView):
queryset = Redirect.objects.all()
serializer_class = RedirectSerializer
pagination_class = RedirectPagination
|
cfaeb584ed74b10de76247a6984c0c6950a1eb25
|
Ispyra/checks.py
|
Ispyra/checks.py
|
from discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
|
from discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist and pref is ctx.prefix
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
|
Fix prefix checking only working for botmaster commands
|
Fix prefix checking only working for botmaster commands
|
Python
|
mit
|
Ispira/Ispyra
|
from discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
Fix prefix checking only working for botmaster commands
|
from discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist and pref is ctx.prefix
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
|
<commit_before>from discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
<commit_msg>Fix prefix checking only working for botmaster commands<commit_after>
|
from discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist and pref is ctx.prefix
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
|
from discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
Fix prefix checking only working for botmaster commandsfrom discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist and pref is ctx.prefix
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
|
<commit_before>from discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
<commit_msg>Fix prefix checking only working for botmaster commands<commit_after>from discord.ext import commands
from bot_globals import bot_masters, blacklist
#Check if a user is allowed to use a command
#Perm 0 is simply blacklist checking
#Perm 1 also checks if the user is a botmaster
def allowed(perm, pref):
def permission(ctx):
uid = ctx.message.author.id
if perm == 0:
return uid not in blacklist and pref is ctx.prefix
elif perm == 1:
return uid in bot_masters and uid not in blacklist and pref is ctx.prefix
return commands.check(permission)
|
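A detail the record above leaves as-is: both branches compare prefixes with `is`, which tests object identity rather than string equality, so it only succeeds when the two prefixes happen to be the very same object. Whether that matters here depends on how discord.py builds ctx.prefix; the short sketch below (not part of the record) shows why == is the safer comparison:

# join() builds the string at runtime so CPython's compile-time interning
# does not hide the difference between identity and equality.
pref = "".join(["!", "!"])
ctx_prefix = "!!"

print(pref == ctx_prefix)  # True:  same characters
print(pref is ctx_prefix)  # False: different objects, identity check fails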
c85f423960050fea76452818ce25f9dc287c922a
|
vumidash/dummy_client.py
|
vumidash/dummy_client.py
|
"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = (self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
|
"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = int(self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
|
Fix steps calculation in dummy client -- how did this work before?
|
Fix steps calculation in dummy client -- how did this work before?
|
Python
|
bsd-3-clause
|
praekelt/vumi-dashboard,praekelt/vumi-dashboard
|
"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = (self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
Fix steps calculation in dummy client -- how did this work before?
|
"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = int(self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
|
<commit_before>"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = (self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
<commit_msg>Fix steps calculation in dummy client -- how did this work before?<commit_after>
|
"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = int(self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
|
"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = (self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
Fix steps calculation in dummy client -- how did this work before?"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = int(self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
|
<commit_before>"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = (self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
<commit_msg>Fix steps calculation in dummy client -- how did this work before?<commit_after>"""MetricSource that serves dummy data."""
import random
from vumidash.base import MetricSource, UnknownMetricError
class DummyClient(MetricSource):
"""Serve dummy data."""
def __init__(self):
self.latest = None
self.metric_prefix = "test"
self.prev_values = {} # map of metrics to previous values
def new_value(self, metric):
values = self.prev_values.setdefault(metric, [])
values.insert(0, random.uniform(0, 100))
return values
def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
values = self.get_history(metric, start, end, summary_size, skip_nulls)
return values[0], values[-1]
def get_history(self, metric, start, end, summary_size, skip_nulls=True):
if not metric.startswith(self.metric_prefix):
raise UnknownMetricError("Unknown metric %r" % (metric,))
steps = int(self.total_seconds((-start) - (-end))
/ float(self.total_seconds(summary_size)))
values = self.new_value(metric)
while len(values) < steps:
values = self.new_value(metric)
return values[:steps]
|
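The int() cast in the record above matters because true division of two second counts yields a float, and Python rejects float slice indices, so the old values[:steps] raised a TypeError. A minimal reproduction with made-up numbers (not taken from the record):

values = [1, 2, 3, 4, 5]
steps = 7200 / float(1800)   # 4.0 -- a float, like the old steps calculation

try:
    values[:steps]           # float slice index
except TypeError as exc:
    print(exc)               # slice indices must be integers or None ...

print(values[:int(steps)])   # [1, 2, 3, 4]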
791f64250d5e7c2ac2c5e01aa1e890dbefbc0417
|
falcon_hateoas/middleware.py
|
falcon_hateoas/middleware.py
|
import json
import decimal
import datetime
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def _is_alchemy_object(self, obj):
try:
sqlalchemy.orm.base.object_mapper(obj)
return True
except sqlalchemy.orm.exc.UnmappedInstanceError:
return False
def default(self, o):
if self._is_alchemy_object(o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
import json
import decimal
import datetime
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
Remove dead code from AlchemyJSONEncoder
|
Remove dead code from AlchemyJSONEncoder
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu>
|
Python
|
mit
|
Vnet-as/falcon-hateoas
|
import json
import decimal
import datetime
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def _is_alchemy_object(self, obj):
try:
sqlalchemy.orm.base.object_mapper(obj)
return True
except sqlalchemy.orm.exc.UnmappedInstanceError:
return False
def default(self, o):
if self._is_alchemy_object(o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
Remove dead code from AlchemyJSONEncoder
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu>
|
import json
import decimal
import datetime
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
<commit_before>import json
import decimal
import datetime
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def _is_alchemy_object(self, obj):
try:
sqlalchemy.orm.base.object_mapper(obj)
return True
except sqlalchemy.orm.exc.UnmappedInstanceError:
return False
def default(self, o):
if self._is_alchemy_object(o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
<commit_msg>Remove dead code from AlchemyJSONEncoder
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu><commit_after>
|
import json
import decimal
import datetime
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
import json
import decimal
import datetime
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def _is_alchemy_object(self, obj):
try:
sqlalchemy.orm.base.object_mapper(obj)
return True
except sqlalchemy.orm.exc.UnmappedInstanceError:
return False
def default(self, o):
if self._is_alchemy_object(o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
Remove dead code from AlchemyJSONEncoder
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu>import json
import decimal
import datetime
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
<commit_before>import json
import decimal
import datetime
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def _is_alchemy_object(self, obj):
try:
sqlalchemy.orm.base.object_mapper(obj)
return True
except sqlalchemy.orm.exc.UnmappedInstanceError:
return False
def default(self, o):
if self._is_alchemy_object(o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
<commit_msg>Remove dead code from AlchemyJSONEncoder
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu><commit_after>import json
import decimal
import datetime
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
d = {}
for col in o.__table__.columns.keys():
value = getattr(o, col)
if hasattr(value, 'isoformat'):
d[col] = value.isoformat()
elif isinstance(value, datetime.timedelta):
d[col] = str(value)
elif isinstance(value, decimal.Decimal):
d[col] = float(value)
else:
d[col] = value
return d
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
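For the record above, the key behaviour is that json.dumps calls an encoder's default() only for objects it cannot serialize natively, so the slimmed-down encoder works as long as everything reaching it has a __table__. A self-contained sketch; FakeRow, FakeTable, and FakeColumns are invented stand-ins for a SQLAlchemy-mapped object, not real library classes:

import datetime
import json

class FakeColumns:
    def keys(self):
        return ["id", "created"]

class FakeTable:
    columns = FakeColumns()

class FakeRow:  # stands in for a mapped row object
    __table__ = FakeTable()
    id = 1
    created = datetime.datetime(2017, 1, 1)

class RowEncoder(json.JSONEncoder):
    def default(self, o):  # invoked only for non-JSON-native objects
        d = {}
        for col in o.__table__.columns.keys():
            value = getattr(o, col)
            d[col] = value.isoformat() if hasattr(value, "isoformat") else value
        return d

print(json.dumps(FakeRow(), cls=RowEncoder))
# {"id": 1, "created": "2017-01-01T00:00:00"}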
df251bfe9a3f68e30efeac2692df996e1494615b
|
tests/common.py
|
tests/common.py
|
import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=True, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
|
import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=False, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
|
Enable online lookups for test.
|
Enable online lookups for test.
|
Python
|
apache-2.0
|
fakedrake/WikipediaBase
|
import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=True, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
Enable online lookups for test.
|
import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=False, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
|
<commit_before>import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=True, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
<commit_msg>Enable online lookups for test.<commit_after>
|
import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=False, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
|
import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=True, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
Enable online lookups for test.import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=False, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
|
<commit_before>import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=True, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
<commit_msg>Enable online lookups for test.<commit_after>import os
from wikipediabase.fetcher import CachingSiteFetcher
ALL_TEST_PAGES = [
]
def data(fname):
return os.path.abspath('/'.join([__package__, 'data', fname]))
def read_data(fname):
return open(data(fname)).read()
def download_all(pages=ALL_TEST_PAGES):
f = CachingSiteFetcher(offline=False, fname=data("pages.json"))
for p in pages:
f.download(p)
f.source(p)
TEST_FETCHER_SETUP = dict(offline=False, fname=data("pages.json"))
def get_fetcher():
return CachingSiteFetcher(**TEST_FETCHER_SETUP)
|
6259df76129327a42c08fdd4b999ea7c617c6c9d
|
project/ndaparser/models.py
|
project/ndaparser/models.py
|
# -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
filename_normalized = unicodeslugify.slugify(
filename, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
)
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
|
# -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
file_parts = filename.split('.')
if len(file_parts) > 1:
name = '.'.join(file_parts[:-1])
ext = '.' + file_parts[-1]
else:
ext = ''
name = filename
filename_normalized = unicodeslugify.slugify(
name, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
) + ext
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
|
Fix the filename normalizing to keep extension
|
Fix the filename normalizing to keep extension
|
Python
|
mit
|
rambo/asylum,jautero/asylum,HelsinkiHacklab/asylum,rambo/asylum,HelsinkiHacklab/asylum,hacklab-fi/asylum,rambo/asylum,jautero/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,rambo/asylum,HelsinkiHacklab/asylum,hacklab-fi/asylum,jautero/asylum,hacklab-fi/asylum,jautero/asylum
|
# -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
filename_normalized = unicodeslugify.slugify(
filename, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
)
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
Fix the filename normalizing to keep extension
|
# -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
file_parts = filename.split('.')
if len(file_parts) > 1:
name = '.'.join(file_parts[:-1])
ext = '.' + file_parts[-1]
else:
ext = ''
name = filename
filename_normalized = unicodeslugify.slugify(
name, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
) + ext
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
|
<commit_before># -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
filename_normalized = unicodeslugify.slugify(
filename, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
)
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
<commit_msg>Fix the filename normalizing to keep extension<commit_after>
|
# -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
file_parts = filename.split('.')
if len(file_parts) > 1:
name = '.'.join(file_parts[:-1])
ext = '.' + file_parts[-1]
else:
ext = ''
name = filename
filename_normalized = unicodeslugify.slugify(
name, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
) + ext
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
|
# -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
filename_normalized = unicodeslugify.slugify(
filename, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
)
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
Fix the filename normalizing to keep extension# -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
file_parts = filename.split('.')
if len(file_parts) > 1:
name = '.'.join(file_parts[:-1])
ext = '.' + file_parts[-1]
else:
ext = ''
name = filename
filename_normalized = unicodeslugify.slugify(
name, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
) + ext
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
|
<commit_before># -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
filename_normalized = unicodeslugify.slugify(
filename, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
)
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
<commit_msg>Fix the filename normalizing to keep extension<commit_after># -*- coding: utf-8 -*-
import datetime
import slugify as unicodeslugify
from django.db import models, transaction
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from asylum.models import AsylumModel
def get_sentinel_user():
"""Gets a "sentinel" user ("deleted") for assigning as uploader"""
return get_user_model().objects.get_or_create(username='deleted')[0]
def datestamped_and_normalized(instance, filename):
"""Normalizes the filename and places it in a datestamped path"""
file_parts = filename.split('.')
if len(file_parts) > 1:
name = '.'.join(file_parts[:-1])
ext = '.' + file_parts[-1]
else:
ext = ''
name = filename
filename_normalized = unicodeslugify.slugify(
name, only_ascii=True, lower=True,
spaces=False, space_replacement='_'
) + ext
return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(filename_normalized)
class UploadedTransaction(AsylumModel):
"""Track uploaded transaction files"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET(get_sentinel_user))
file = models.FileField(upload_to=datestamped_and_normalized)
stamp = models.DateTimeField(auto_now_add=True, editable=False)
last_transaction = models.DateField()
class Meta:
verbose_name = _('Uploaded transaction')
verbose_name_plural = _('Uploaded transaction')
ordering = [ '-stamp' ]
|
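The record above strips and re-attaches the extension by hand; the standard library's os.path.splitext covers the same cases, including names with no dot, in one call. A hedged alternative sketch, not the commit's code -- the lower/replace pair merely stands in for the slugify call:

import datetime
import os

def datestamped_and_normalized_alt(filename):
    name, ext = os.path.splitext(filename)  # ext is "" when there is no dot
    normalized = name.lower().replace(" ", "_") + ext
    return datetime.datetime.now().strftime("ndaparser/%Y/%m/%d/{}").format(normalized)

print(datestamped_and_normalized_alt("My Statement.txt"))
# e.g. ndaparser/2017/01/01/my_statement.txt (the date part varies)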
a6a88fac6300b92c82e797f72477df1df6b87dbe
|
faq/views.py
|
faq/views.py
|
from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.all()
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.get(category=self.category)
|
from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.filter(categories=None)
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.get(category=self.category)
|
Index should be ones without a category.
|
Index should be ones without a category.
|
Python
|
bsd-3-clause
|
myles-archive/django-faq,asgardproject/django-faq
|
from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.all()
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.get(category=self.category)Index should be ones without a category.
|
from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.filter(categories=None)
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.filter(category=self.category)
|
<commit_before>from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.all()
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.get(category=self.category)<commit_msg>Index should be ones without a category.<commit_after>
|
from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.filter(categories=None)
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.filter(category=self.category)
|
from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.all()
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.get(category=self.category)Index should be ones without a category.from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.filter(categories=None)
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.filter(category=self.category)
|
<commit_before>from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.all()
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.get(category=self.category)<commit_msg>Index should be ones without a category.<commit_after>from django.http import Http404
from django.views.generic import ListView, DetailView
from faq.models import Question, Category
class FAQQuestionListView(ListView):
context_object_name = "question_list"
template_name = "faq/question_list.html"
def get_queryset(self):
return Question.objects.filter(categories=None)
class FAQQuestionDetailView(DetailView):
context_object_name = 'question'
template_name = 'faq/question_detail.html'
def get_object(self):
return Question.objects.get(slug__iexact=self.kwargs['slug'])
class FAQCategoryListView(ListView):
context_object_name = "category_list"
template_name = "faq/category_list.html"
def get_queryset(self):
return Category.objects.all()
class FAQCategoryDetailView(ListView):
context_object_name = 'question_list'
template_name = "faq/question_list.html"
def get_queryset(self):
try:
self.category = Category.objects.get(slug__iexact=self.kwargs['slug'])
except Category.DoesNotExist:
raise Http404
return Question.objects.filter(category=self.category)
|
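Worth noting about the views above: get_queryset() must hand back a queryset (an iterable the ListView can paginate), so filter() is the right call even when a single category is selected, while get() returns one instance or raises. A small sketch using the record's model names:
from faq.models import Question
# Index: questions attached to no category at all (what the commit switches to).
uncategorized = Question.objects.filter(categories=None)
# Category page: filter() yields a (possibly empty) queryset;
# get() would yield one Question or raise DoesNotExist.
# per_category = Question.objects.filter(category=category)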
0ecea9e68755bb7f03702b68d3f8565dde4fd16b
|
src/squibs/memsquib.py
|
src/squibs/memsquib.py
|
#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
|
#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
print 'mem.used %d' %(mem[0] - sum(mem[1:]))
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
|
Add a memory used metric
|
Add a memory used metric
|
Python
|
apache-2.0
|
mcrewson/squib
|
#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
Add a memory used metric
|
#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
print 'mem.used %d' %(mem[0] - sum(mem[1:]))
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
|
<commit_before>#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
<commit_msg>Add a memory used metric<commit_after>
|
#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
print 'mem.used %d' %(mem[0] - sum(mem[1:]))
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
|
#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
Add a memory used metric#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
print 'mem.used %d' %(mem[0] - sum(mem[1:]))
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
|
<commit_before>#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
<commit_msg>Add a memory used metric<commit_after>#!/usr/bin/python2
# vim:set ts=4 sw=4 et nowrap syntax=python ff=unix:
##############################################################################
import sys, time
def memory ():
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
mem = []
for x in range(4):
mem.append(int(lines[x].split()[1], 10) * 1024)
print 'mem.total %d' % (mem[0])
print 'mem.free %d' % (mem[1])
print 'mem.buffers %d' % (mem[2])
print 'mem.cached %d' % (mem[3])
print 'mem.used %d' %(mem[0] - sum(mem[1:]))
sys.stdout.flush()
def run (poll_interval):
while True:
start = time.time()
memory()
done = time.time()
delay = poll_interval - (done - start)
if delay > 0.0: time.sleep(delay)
if __name__ == '__main__':
run(10.0)
##############################################################################
## THE END
|
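For context on the squib above: it reads the first four /proc/meminfo lines positionally (MemTotal, MemFree, Buffers, Cached, in kB). Newer kernels insert MemAvailable after MemFree, so positional parsing is fragile there; a name-based Python 3 sketch of the same "used" computation:
def mem_used_bytes():
    # Parse /proc/meminfo by field name instead of line position.
    info = {}
    with open('/proc/meminfo') as f:
        for line in f:
            key, value = line.split(':', 1)
            info[key] = int(value.split()[0]) * 1024  # values are reported in kB
    return info['MemTotal'] - (info['MemFree'] + info['Buffers'] + info['Cached'])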
f4d87b49f100121896ab147e08f634ebcf68ae40
|
generator.py
|
generator.py
|
import graph
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n" +
"</body>")
def printFooter():
print("</html>")
generate()
|
import graph
import datetime
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n")
def printFooter():
print("<p>Generated: " + datetime.datetime.now() + "</p>")
print("</body></html>")
generate()
|
Print generated date & time
|
Print generated date & time
|
Python
|
mit
|
eghuro/pirgroups
|
import graph
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n" +
"</body>")
def printFooter():
print("</html>")
generate()
Print generated date & time
|
import graph
import datetime
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n")
def printFooter():
print("<p>Generated: " + datetime.datetime.now() + "</p>")
print("</body></html>")
generate()
|
<commit_before>import graph
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n" +
"</body>")
def printFooter():
print("</html>")
generate()
<commit_msg>Print generated date & time<commit_after>
|
import graph
import datetime
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n")
def printFooter():
print("<p>Generated: " + datetime.datetime.now() + "</p>")
print("</body></html>")
generate()
|
import graph
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n" +
"</body>")
def printFooter():
print("</html>")
generate()
Print generated date & timeimport graph
import datetime
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n")
def printFooter():
print("<p>Generated: " + datetime.datetime.now() + "</p>")
print("</body></html>")
generate()
|
<commit_before>import graph
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n" +
"</body>")
def printFooter():
print("</html>")
generate()
<commit_msg>Print generated date & time<commit_after>import graph
import datetime
def generate():
count = graph.getTotalCount()
zahajeni = graph.getSkupinaZahajeni(count)
probihajici = graph.getSkupinaProbihajici(count)
printHeader()
printBody(count, zahajeni, probihajici)
printFooter()
def printHeader():
print("<!DOCTYPE html>\n<html>\n<head>\n" +
"<title>Skupiny clenu v RV</title>\n" +
"</head>")
def printBody(count, zahajeni, probihajici):
print("<body>\n" +
"<h1>Skupiny clenu v RV</h1>\n" +
"<table border=\"1\"><thead><tr>\n" +
"<td>Pocet clenu</td>\n" +
"<td>Velikost skupiny pro zahajeni jednani</td>\n" +
"<td>Velikost skupiny na probihajicim jednani</td>\n" +
"</tr>\n</thead>\n<tbody>\n<tr>" +
"<td>" +
str(count) +
"</td><td>" +
str(zahajeni) +
"</td><td>" +
str(probihajici) +
"</td></tr>\n" +
"</tbody></table>\n")
def printFooter():
print("<p>Generated: " + datetime.datetime.now() + "</p>")
print("</body></html>")
generate()
|
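One detail worth flagging in the commit above: datetime.datetime.now() is a datetime object, not a str, so it has to go through str() (as in the listing) or an explicit format before string concatenation. A tiny sketch:
import datetime
stamp = datetime.datetime.now()
footer = "<p>Generated: " + str(stamp) + "</p>"               # default text form
footer = "<p>Generated: {:%Y-%m-%d %H:%M}</p>".format(stamp)  # explicit format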
29cd6935a35872774e30804918c861af94230a8a
|
src/utils/cxiwriter.py
|
src/utils/cxiwriter.py
|
try:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm)
|
try:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm.Clone())
|
Use a separate communicator for each cxi writer
|
Use a separate communicator for each cxi writer
|
Python
|
bsd-2-clause
|
SPIhub/hummingbird,FXIhub/hummingbird,FXIhub/hummingbird
|
try:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm)
Use a separate communicator for each cxi writer
|
try:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm.Clone())
|
<commit_before>try:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm)
<commit_msg>Use a separate communicator for each cxi writer<commit_after>
|
try:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm.Clone())
|
try:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm)
Use a separate communicator for each cxi writertry:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm.Clone())
|
<commit_before>try:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm)
<commit_msg>Use a separate communicator for each cxi writer<commit_after>try:
import h5writer
except ImportError:
print 100*"*"
print "ERROR: For using the utils.cxiwriter.CXIWriter class please install the package \'h5writer\'."
print "\t $ pip install h5writer"
print "\t (Github repository: https://github.com/mhantke/h5writer)"
print 100*"*"
exit(1)
import ipc.mpi
logger = h5writer.logger
if ipc.mpi.size == 1:
CXIWriter = h5writer.H5Writer
else:
class CXIWriter(h5writer.H5WriterMPI):
def __init__(self, filename, chunksize=100, compression=None):
h5writer.H5WriterMPI.__init__(self, filename=filename, chunksize=chunksize, compression=compression, comm=ipc.mpi.slaves_comm.Clone())
|
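The fix above hands each writer its own duplicated communicator, so collective operations issued by independent writers cannot interleave on the shared slaves communicator. A hedged mpi4py sketch of the pattern:
from mpi4py import MPI
# Clone()/Dup() duplicate a communicator: same group of ranks,
# but an isolated message context, so traffic cannot cross between writers.
world = MPI.COMM_WORLD
writer_comm_a = world.Clone()
writer_comm_b = world.Clone()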
4a12eb87e660b3fdf1ae4a6c0b2628f45541695a
|
app/birdfeeder/handlers/main.py
|
app/birdfeeder/handlers/main.py
|
import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
|
import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/feed/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
|
Fix Bird Feeder Reader URL.
|
Fix Bird Feeder Reader URL.
|
Python
|
apache-2.0
|
mihaip/streamspigot,mihaip/streamspigot,mihaip/streamspigot,mihaip/streamspigot
|
import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
Fix Bird Feeder Reader URL.
|
import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/feed/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
|
<commit_before>import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
<commit_msg>Fix Bird Feeder Reader URL.<commit_after>
|
import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/feed/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
|
import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
Fix Bird Feeder Reader URL.import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/feed/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
|
<commit_before>import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
<commit_msg>Fix Bird Feeder Reader URL.<commit_after>import urllib
from base.constants import CONSTANTS
import session
class IndexHandler(session.SessionApiHandler):
def _get_signed_in(self):
twitter_user = self._api.GetUser(self._session.twitter_id)
timeline_feed_url = self._get_path(
'feed/timeline/%s' % self._session.feed_id)
timeline_reader_url = \
'http://www.google.com/reader/view/feed/%s' % urllib.quote(
CONSTANTS.APP_URL + timeline_feed_url)
self._write_template('birdfeeder/index-signed-in.html', {
'twitter_user': twitter_user,
'sign_out_path': self._get_path('sign-out'),
'timeline_feed_url': timeline_feed_url,
'timeline_reader_url': timeline_reader_url,
})
def _get_signed_out(self):
self._write_template('birdfeeder/index-signed-out.html', {
'sign_in_path': self._get_path('sign-in'),
})
|
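For reference on the URL fix above: Google Reader's subscribe path expects 'view/feed/' followed by the percent-quoted feed URL. A Python 3 sketch (urllib.parse.quote is the py3 spelling of the urllib.quote used in the record); the feed URL here is hypothetical:
from urllib.parse import quote
feed_url = 'http://example.appspot.com/feed/timeline/abc123'  # hypothetical
# quote() leaves '/' unescaped by default, matching urllib.quote above.
reader_url = 'http://www.google.com/reader/view/feed/%s' % quote(feed_url)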
199b3b2d95c7ada67a0b3c49abe9b6347266c0eb
|
codefett/users/serializers.py
|
codefett/users/serializers.py
|
from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
user__password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'user__email', 'full_name', 'user__password', 'avatar', 'created_at', 'user__date_joined')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
pass
|
from django.contrib.auth import update_session_auth_hash
from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
password = serializers.CharField(write_only=True, required=False)
confirm_password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'email', 'full_name', 'created_at', 'updated_at', 'password', 'confirm_password')
read_only_fields = ('created_at', 'updated_at')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
instance.email = validated_data.get('email', instance.email)
instance.save()
password = validated_data.get('password', None)
confirm_password = validated_data.get('confirm_password', None)
if password and confirm_password and password == confirm_password:
instance.set_password(password)
instance.save()
update_session_auth_hash(self.context.get('request'), instance)
return instance
|
Complete update method of User Serializer
|
Complete update method of User Serializer
|
Python
|
agpl-3.0
|
Rulox/codefett,Rulox/codefett,Rulox/codefett
|
from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
user__password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'user__email', 'full_name', 'user__password', 'avatar', 'created_at', 'user__date_joined')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
passComplete update method of User Serializer
|
from django.contrib.auth import update_session_auth_hash
from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
password = serializers.CharField(write_only=True, required=False)
confirm_password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'email', 'full_name', 'created_at', 'updated_at', 'password', 'confirm_password')
read_only_fields = ('created_at', 'updated_at')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
instance.email = validated_data.get('email', instance.email)
instance.save()
password = validated_data.get('password', None)
confirm_password = validated_data.get('confirm_password', None)
if password and confirm_password and password == confirm_password:
instance.set_password(password)
instance.save()
update_session_auth_hash(self.context.get('request'), instance)
return instance
|
<commit_before>from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
user__password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'user__email', 'full_name', 'user__password', 'avatar', 'created_at', 'user__date_joined')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
pass<commit_msg>Complete update method of User Serializer<commit_after>
|
from django.contrib.auth import update_session_auth_hash
from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
password = serializers.CharField(write_only=True, required=False)
confirm_password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'email', 'full_name', 'created_at', 'updated_at', 'password', 'confirm_password')
read_only_fields = ('created_at', 'updated_at')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
instance.email = validated_data.get('email', instance.email)
instance.save()
password = validated_data.get('password', None)
confirm_password = validated_data.get('confirm_password', None)
if password and confirm_password and password == confirm_password:
instance.set_password(password)
instance.save()
update_session_auth_hash(self.context.get('request'), instance)
return instance
|
from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
user__password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'user__email', 'full_name', 'user__password', 'avatar', 'created_at', 'user__date_joined')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
passComplete update method of User Serializerfrom django.contrib.auth import update_session_auth_hash
from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
password = serializers.CharField(write_only=True, required=False)
confirm_password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'email', 'full_name', 'created_at', 'updated_at', 'password', 'confirm_password')
read_only_fields = ('created_at', 'updated_at')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
instance.email = validated_data.get('email', instance.email)
instance.save()
password = validated_data.get('password', None)
confirm_password = validated_data.get('confirm_password', None)
if password and confirm_password and password == confirm_password:
instance.set_password(password)
instance.save()
update_session_auth_hash(self.context.get('request'), instance)
return instance
|
<commit_before>from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
user__password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'user__email', 'full_name', 'user__password', 'avatar', 'created_at', 'user__date_joined')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
pass<commit_msg>Complete update method of User Serializer<commit_after>from django.contrib.auth import update_session_auth_hash
from rest_framework import serializers
from .models import CFUser
class CFUserSerializer(serializers.ModelSerializer):
"""
Serializes a CFUser Model
"""
password = serializers.CharField(write_only=True, required=False)
confirm_password = serializers.CharField(write_only=True, required=False)
class Meta:
model = CFUser
fields = ('id', 'email', 'full_name', 'created_at', 'updated_at', 'password', 'confirm_password')
read_only_fields = ('created_at', 'updated_at')
def create(self, validated_data):
return CFUser.objects.create(**validated_data)
def update(self, instance, validated_data):
instance.email = validated_data('email', instance.email)
instance.save()
password = validated_data.get('password', None)
confirm_password = validated_data.get('confirm_password', None)
if password and confirm_password and password == confirm_password:
instance.set_password(password)
instance.save()
update_session_auth_hash(self.context.get('request'), instance)
return instance
|
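A note on the update() above: validated_data is a plain dict, so fields come out via .get(), and update_session_auth_hash keeps the session valid after the password changes. A trimmed sketch of that shape, written as a standalone function rather than the serializer method:
def update_user(instance, validated_data):
    # Dict lookup with a fallback -- validated_data is not callable.
    instance.email = validated_data.get('email', instance.email)
    password = validated_data.get('password')
    if password and password == validated_data.get('confirm_password'):
        instance.set_password(password)  # hashes before storing
    instance.save()
    return instance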
4e5674d938e40d86a140ec591d6a7429b9c29902
|
test/conftest.py
|
test/conftest.py
|
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
|
import os
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
parser.addoption("--offline", action="store_true", default=False,
help=("Do not download items from internet. Use if you have predownloaded "
"packages and set PIP_FIND_LINKS."))
def pytest_sessionstart(session):
if session.config.getoption('offline'):
os.environ['PIP_NO_INDEX'] = '1'
os.environ['CONDA_OFFLINE'] = 'True'
|
Add --offline flag for testing without downloads
|
Add --offline flag for testing without downloads
|
Python
|
bsd-3-clause
|
spacetelescope/asv,qwhelan/asv,spacetelescope/asv,airspeed-velocity/asv,spacetelescope/asv,spacetelescope/asv,airspeed-velocity/asv,airspeed-velocity/asv,pv/asv,pv/asv,qwhelan/asv,pv/asv,qwhelan/asv,qwhelan/asv,airspeed-velocity/asv,pv/asv
|
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
Add --offline flag for testing without downloads
|
import os
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
parser.addoption("--offline", action="store_true", default=False,
help=("Do not download items from internet. Use if you have predownloaded "
"packages and set PIP_FIND_LINKS."))
def pytest_sessionstart(session):
if session.config.getoption('offline'):
os.environ['PIP_NO_INDEX'] = '1'
os.environ['CONDA_OFFLINE'] = 'True'
|
<commit_before>
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
<commit_msg>Add --offline flag for testing without downloads<commit_after>
|
import os
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
parser.addoption("--offline", action="store_true", default=False,
help=("Do not download items from internet. Use if you have predownloaded "
"packages and set PIP_FIND_LINKS."))
def pytest_sessionstart(session):
if session.config.getoption('offline'):
os.environ['PIP_NO_INDEX'] = '1'
os.environ['CONDA_OFFLINE'] = 'True'
|
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
Add --offline flag for testing without downloadsimport os
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
parser.addoption("--offline", action="store_true", default=False,
help=("Do not download items from internet. Use if you have predownloaded "
"packages and set PIP_FIND_LINKS."))
def pytest_sessionstart(session):
if session.config.getoption('offline'):
os.environ['PIP_NO_INDEX'] = '1'
os.environ['CONDA_OFFLINE'] = 'True'
|
<commit_before>
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
<commit_msg>Add --offline flag for testing without downloads<commit_after>import os
def pytest_addoption(parser):
parser.addoption("--webdriver", action="store", default="None",
help=("Selenium WebDriver interface to use for running the test. "
"Choices: None, PhantomJS, Chrome, Firefox, ChromeHeadless, "
"FirefoxHeadless. Alternatively, it can be arbitrary Python code "
"with a return statement with selenium.webdriver object, for "
"example 'return Chrome()'"))
parser.addoption("--offline", action="store_true", default=False,
help=("Do not download items from internet. Use if you have predownloaded "
"packages and set PIP_FIND_LINKS."))
def pytest_sessionstart(session):
if session.config.getoption('offline'):
os.environ['PIP_NO_INDEX'] = '1'
os.environ['CONDA_OFFLINE'] = 'True'
|
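Usage note for the conftest above: the session-start hook fires once, before any test environment is built, so PIP_NO_INDEX and CONDA_OFFLINE are already in place when pip or conda runs; invoke as pytest --offline with PIP_FIND_LINKS pointing at pre-downloaded packages. Reading the same option back from a fixture looks like:
import pytest
@pytest.fixture
def offline(request):
    # Same lookup as session.config.getoption('offline') in the hook.
    return request.config.getoption('offline')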
d47e3b7216effab8aa067d0a214b071ca77393fd
|
stories/serializers.py
|
stories/serializers.py
|
from rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
|
from rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(read_only=True, many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
|
Set stories_set to read only field
|
Set stories_set to read only field
|
Python
|
mit
|
pu6ki/tarina,pu6ki/tarina,pu6ki/tarina
|
from rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
Set stories_set to read only field
|
from rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(read_only=True, many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
|
<commit_before>from rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
<commit_msg>Set stories_set to read only field<commit_after>
|
from rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(read_only=True, many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
|
from rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
Set storyline_set to read only fieldfrom rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(read_only=True, many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
|
<commit_before>from rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
<commit_msg>Set stories_set to read only field<commit_after>from rest_framework import serializers
from users.serializers import AuthorSerializer
from .models import Story, StoryLine
class StoryLineSerializer(serializers.ModelSerializer):
class Meta:
model = StoryLine
fields = ('id', 'content', 'posted_on')
class StorySerializer(serializers.ModelSerializer):
title = serializers.CharField(min_length=3, max_length=100)
author = AuthorSerializer(read_only=True)
storyline_set = StoryLineSerializer(read_only=True, many=True)
class Meta:
model = Story
fields = ('id', 'title', 'author', 'posted_on', 'storyline_set')
def create(self, validated_data):
request = self.context['request']
author = request.user.author
return Story.objects.create(author=author, **validated_data)
|
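A minimal sketch (assuming a configured Django/DRF project) of what read_only=True on the nested field buys: the nested payload is ignored on input, so create() never receives it. The serializer names below are illustrative, not part of the record.
from rest_framework import serializers
class LineSketchSerializer(serializers.Serializer):
    content = serializers.CharField()
class StorySketchSerializer(serializers.Serializer):
    title = serializers.CharField()
    storyline_set = LineSketchSerializer(read_only=True, many=True)
s = StorySketchSerializer(data={'title': 'Once',
                                'storyline_set': [{'content': 'ignored'}]})
assert s.is_valid()
# Read-only fields are excluded from validated_data, so nothing can be
# written through the nested serializer.
assert 'storyline_set' not in s.validated_data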
ca8fa466638c0ef405a82dfc3cfecfdb400faaa7
|
sublime_jedi/helper.py
|
sublime_jedi/helper.py
|
# -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(sublime_plugin.TextCommand):
"""
Show doctring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
|
# -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon, PythonCommandMixin
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(PythonCommandMixin, sublime_plugin.TextCommand):
"""
Show doctring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(PythonCommandMixin, sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
|
Hide documentation commands in non-python scope
|
Hide documentation commands in non-python scope
|
Python
|
mit
|
srusskih/SublimeJEDI,edelvalle/SublimeJEDI
|
# -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(sublime_plugin.TextCommand):
"""
    Show docstring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
Hide documentation commands in non-python scope
|
# -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon, PythonCommandMixin
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(PythonCommandMixin, sublime_plugin.TextCommand):
"""
    Show docstring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(PythonCommandMixin, sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
|
<commit_before># -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(sublime_plugin.TextCommand):
"""
    Show docstring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
<commit_msg>Hide documentation commands in non-python scope<commit_after>
|
# -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon, PythonCommandMixin
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(PythonCommandMixin, sublime_plugin.TextCommand):
"""
    Show docstring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(PythonCommandMixin, sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
|
# -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(sublime_plugin.TextCommand):
"""
    Show docstring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
Hide documentation commands in non-python scope# -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon, PythonCommandMixin
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(PythonCommandMixin, sublime_plugin.TextCommand):
"""
    Show docstring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(PythonCommandMixin, sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
|
<commit_before># -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(sublime_plugin.TextCommand):
"""
    Show docstring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
<commit_msg>Hide documentation commands in non-python scope<commit_after># -*- coding: utf-8 -*-
import sublime
import sublime_plugin
from .utils import ask_daemon, PythonCommandMixin
class HelpMessageCommand(sublime_plugin.TextCommand):
def run(self, edit, docstring):
self.view.close()
self.view.insert(edit, self.view.size(), docstring)
class SublimeJediDocstring(PythonCommandMixin, sublime_plugin.TextCommand):
"""
    Show docstring in output panel
"""
def run(self, edit):
ask_daemon(self.view, self.show_docstring, 'docstring')
def show_docstring(self, view, docstring):
window = sublime.active_window()
if docstring:
output = window.get_output_panel('help_panel')
output.set_read_only(False)
output.run_command('help_message', {'docstring': docstring})
output.set_read_only(True)
window.run_command("show_panel", {"panel": "output.help_panel"})
else:
window.run_command("hide_panel", {"panel": "output.help_panel"})
sublime.status_message('Jedi: No results!')
class SublimeJediSignature(PythonCommandMixin, sublime_plugin.TextCommand):
"""
Show signature in statusbar
"""
def run(self, edit):
ask_daemon(self.view, self.show_signature, 'signature')
def show_signature(self, view, signature):
if signature:
sublime.status_message('Jedi: {0}'.format(signature))
|
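PythonCommandMixin itself never appears in the record; a plausible sketch of such a mixin (hypothetical — the real one lives in sublime_jedi/utils.py) gates the commands on the Python syntax scope:
class PythonCommandMixin(object):
    """Mixed into a sublime_plugin.TextCommand to enable it only in Python."""
    def is_enabled(self):
        # match_selector() is true when the scope at the given point
        # includes 'source.python'
        return self.view.match_selector(0, 'source.python')
    def is_visible(self):
        return self.is_enabled()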
f56d4814cade66fe8f8b58578048962179e18b15
|
infohandball/settings/dev.py
|
infohandball/settings/dev.py
|
from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
|
from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
JQUERY_URL = ''
|
Use existing jquery for debug toolbar.
|
Use existing jquery for debug toolbar.
|
Python
|
mit
|
lhuriguen/tophandball,lhuriguen/tophandball,lhuriguen/tophandball
|
from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
Use existing jquery for debug toolbar.
|
from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
JQUERY_URL = ''
|
<commit_before>from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
<commit_msg>Use existing jquery for debug toolbar.<commit_after>
|
from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
JQUERY_URL = ''
|
from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
Use existing jquery for debug toolbar.from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
JQUERY_URL = ''
|
<commit_before>from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
<commit_msg>Use existing jquery for debug toolbar.<commit_after>from base import *
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
INSTALLED_APPS += [
'debug_toolbar',
]
JQUERY_URL = ''
|
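Background on the one-liner: django-debug-toolbar loads its own jQuery from a CDN by default, and an empty JQUERY_URL tells it to reuse the jQuery the site already ships. In later toolbar releases the same switch lives under DEBUG_TOOLBAR_CONFIG (version-dependent sketch):
# django-debug-toolbar 1.x places the setting in a config dict; the empty
# string still means "use the page's existing jQuery".
DEBUG_TOOLBAR_CONFIG = {
    'JQUERY_URL': '',
}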
ea43efc9d820833090670305a73543b43cf4286b
|
test/test_pyc.py
|
test/test_pyc.py
|
"""
Test completions from *.pyc files:
- generated a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import os
import compileall
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
|
"""
Test completions from *.pyc files:
- generate a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import compileall
import os
import shutil
import sys
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
if sys.version_info[0] == 3:
# Python3 specific:
# To import pyc modules, we must move them out of the __pycache__
# directory and rename them to remove ".cpython-%s%d"
# see: http://stackoverflow.com/questions/11648440/python-does-not-detect-pyc-files
for f in os.listdir("__pycache__"):
dst = f.replace('.cpython-%s%s' % sys.version_info[:2], "")
shutil.copy(os.path.join("__pycache__", f), dst)
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
|
Fix pyc test for python3
|
Fix pyc test for python3
To import pyc modules, we must move them out of the __pycache__
directory and rename them to remove ".cpython-%s%d".
This should still fail with python3 (UnicodeDecodeError)
|
Python
|
mit
|
flurischt/jedi,flurischt/jedi,dwillmer/jedi,jonashaag/jedi,WoLpH/jedi,jonashaag/jedi,mfussenegger/jedi,tjwei/jedi,dwillmer/jedi,tjwei/jedi,mfussenegger/jedi,WoLpH/jedi
|
"""
Test completions from *.pyc files:
- generated a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import os
import compileall
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
Fix pyc test for python3
To import pyc modules, we must move them out of the __pycache__
directory and rename them to remove ".cpython-%s%d".
This should still fail with python3 (UnicodeDecodeError)
|
"""
Test completions from *.pyc files:
- generate a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import compileall
import os
import shutil
import sys
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
if sys.version_info[0] == 3:
# Python3 specific:
# To import pyc modules, we must move them out of the __pycache__
# directory and rename them to remove ".cpython-%s%d"
# see: http://stackoverflow.com/questions/11648440/python-does-not-detect-pyc-files
for f in os.listdir("__pycache__"):
dst = f.replace('.cpython-%s%s' % sys.version_info[:2], "")
shutil.copy(os.path.join("__pycache__", f), dst)
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
|
<commit_before>"""
Test completions from *.pyc files:
- generated a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import os
import compileall
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
<commit_msg>Fix pyc test for python3
To import pyc modules, we must move them out of the __pycache__
directory and rename them to remove ".cpython-%s%d".
This should still fail with python3 (UnicodeDecodeError)<commit_after>
|
"""
Test completions from *.pyc files:
- generate a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import compileall
import os
import shutil
import sys
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
if sys.version_info[0] == 3:
# Python3 specific:
# To import pyc modules, we must move them out of the __pycache__
# directory and rename them to remove ".cpython-%s%d"
# see: http://stackoverflow.com/questions/11648440/python-does-not-detect-pyc-files
for f in os.listdir("__pycache__"):
dst = f.replace('.cpython-%s%s' % sys.version_info[:2], "")
shutil.copy(os.path.join("__pycache__", f), dst)
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
|
"""
Test completions from *.pyc files:
- generated a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import os
import compileall
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
Fix pyc test for python3
To import pyc modules, we must move them out of the __pycache__
directory and rename them to remove ".cpython-%s%d".
This should still fail with python3 (UnicodeDecodeError)"""
Test completions from *.pyc files:
- generate a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import compileall
import os
import shutil
import sys
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
if sys.version_info[0] == 3:
# Python3 specific:
# To import pyc modules, we must move them out of the __pycache__
# directory and rename them to remove ".cpython-%s%d"
# see: http://stackoverflow.com/questions/11648440/python-does-not-detect-pyc-files
for f in os.listdir("__pycache__"):
dst = f.replace('.cpython-%s%s' % sys.version_info[:2], "")
shutil.copy(os.path.join("__pycache__", f), dst)
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
|
<commit_before>"""
Test completions from *.pyc files:
- generated a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import os
import compileall
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
<commit_msg>Fix pyc test for python3
To import pyc modules, we must move them out of the __pycache__
directory and rename them to remove ".cpython-%s%d".
This should still fail with python3 (UnicodeDecodeError)<commit_after>"""
Test completions from *.pyc files:
- generate a dummy python module
- compile the dummy module to generate a *.pyc
- delete the pure python dummy module
- try jedi on the generated *.pyc
"""
import compileall
import os
import shutil
import sys
import jedi
SRC = """class Foo:
pass
class Bar:
pass
"""
def generate_pyc():
with open("dummy.py", 'w') as f:
f.write(SRC)
compileall.compile_file("dummy.py")
os.remove("dummy.py")
if sys.version_info[0] == 3:
# Python3 specific:
# To import pyc modules, we must move them out of the __pycache__
# directory and rename them to remove ".cpython-%s%d"
# see: http://stackoverflow.com/questions/11648440/python-does-not-detect-pyc-files
for f in os.listdir("__pycache__"):
dst = f.replace('.cpython-%s%s' % sys.version_info[:2], "")
shutil.copy(os.path.join("__pycache__", f), dst)
def test_pyc():
"""
    The completion list must contain at least 2 entries.
"""
generate_pyc()
s = jedi.Script("import dummy; dummy.")
assert len(s.completions()) >= 2
if __name__ == "__main__":
test_pyc()
|
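A worked example of the rename that the fix performs, runnable anywhere; the version tuple is pinned rather than read from sys.version_info so the result is deterministic:
# On CPython 3.4, compileall writes __pycache__/dummy.cpython-34.pyc.
# Stripping the interpreter tag yields a filename the Python 3 import
# system will pick up from the working directory.
f = 'dummy.cpython-34.pyc'
dst = f.replace('.cpython-%s%s' % (3, 4), '')
assert dst == 'dummy.pyc'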
997b8fc0658a5c581d65211285bf11df771889a4
|
app/single_resource/forms.py
|
app/single_resource/forms.py
|
from flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
InputRequired()
])
longitude = FloatField('Longitude', validators=[
InputRequired()
])
submit = SubmitField('Save Resource')
|
from flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length, Optional
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
Optional()
])
longitude = FloatField('Longitude', validators=[
Optional()
])
submit = SubmitField('Save Resource')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if not self.latitude.data or not self.longitude.data:
self.address.errors.append('Please select a valid address')
return False
return True
|
Add validator if latitude/longitude is empty
|
Add validator if latitude/longitude is empty
|
Python
|
mit
|
hack4impact/maps4all,hack4impact/maps4all,hack4impact/maps4all,hack4impact/maps4all
|
from flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
InputRequired()
])
longitude = FloatField('Longitude', validators=[
InputRequired()
])
submit = SubmitField('Save Resource')
Add validator if latitude/longitude is empty
|
from flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length, Optional
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
Optional()
])
longitude = FloatField('Longitude', validators=[
Optional()
])
submit = SubmitField('Save Resource')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if not self.latitude.data or not self.longitude.data:
self.address.errors.append('Please select a valid address')
return False
return True
|
<commit_before>from flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
InputRequired()
])
longitude = FloatField('Longitude', validators=[
InputRequired()
])
submit = SubmitField('Save Resource')
<commit_msg>Add validator if latitude/longitude is empty<commit_after>
|
from flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length, Optional
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
Optional()
])
longitude = FloatField('Longitude', validators=[
Optional()
])
submit = SubmitField('Save Resource')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if not self.latitude.data or not self.longitude.data:
self.address.errors.append('Please select a valid address')
return False
return True
|
from flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
InputRequired()
])
longitude = FloatField('Longitude', validators=[
InputRequired()
])
submit = SubmitField('Save Resource')
Add validator if latitude/longitude is emptyfrom flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length, Optional
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
Optional()
])
longitude = FloatField('Longitude', validators=[
Optional()
])
submit = SubmitField('Save Resource')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if not self.latitude.data or not self.longitude.data:
self.address.errors.append('Please select a valid address')
return False
return True
|
<commit_before>from flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
InputRequired()
])
longitude = FloatField('Longitude', validators=[
InputRequired()
])
submit = SubmitField('Save Resource')
<commit_msg>Add validator if latitude/longitude is empty<commit_after>from flask.ext.wtf import Form
from wtforms.fields import FloatField, StringField, SubmitField
from wtforms.validators import InputRequired, Length, Optional
class SingleResourceForm(Form):
name = StringField('Name', validators=[
InputRequired(),
Length(1, 500)
])
address = StringField('Address', validators=[
InputRequired(),
Length(1, 500)
])
latitude = FloatField('Latitude', validators=[
Optional()
])
longitude = FloatField('Longitude', validators=[
Optional()
])
submit = SubmitField('Save Resource')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if not self.latitude.data or not self.longitude.data:
self.address.errors.append('Please select a valid address')
return False
return True
|
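The override above is the standard WTForms recipe for cross-field validation; a standalone sketch of the same pattern (field names illustrative):
from wtforms import FloatField, Form
from wtforms.validators import Optional
class PointForm(Form):
    lat = FloatField(validators=[Optional()])
    lng = FloatField(validators=[Optional()])
    def validate(self):
        # Per-field validators run first; the cross-field rule runs after.
        if not Form.validate(self):
            return False
        if (self.lat.data is None) != (self.lng.data is None):
            self.lat.errors.append('Provide both coordinates or neither.')
            return False
        return True
form = PointForm(data={'lat': 1.0})
assert not form.validate()  # lng is missing, so the pair check fails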
906505d85914287af3a031bf77f74dd79a4aaa32
|
pygraphc/preprocess/CreateGraphModel.py
|
pygraphc/preprocess/CreateGraphModel.py
|
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.JaroWinkler import JaroWinkler
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
jw = JaroWinkler(self.event_attributes, self.unique_events_length)
self.distances = jw.get_jarowinkler()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
|
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.CosineSimilarity import ParallelCosineSimilarity
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
pcs = ParallelCosineSimilarity(self.event_attributes, self.unique_events_length)
self.distances = pcs.get_parallel_cosine_similarity()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
|
Change jaro-winkler to cosine similarity
|
Change jaro-winkler to cosine similarity
|
Python
|
mit
|
studiawan/pygraphc
|
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.JaroWinkler import JaroWinkler
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
jw = JaroWinkler(self.event_attributes, self.unique_events_length)
self.distances = jw.get_jarowinkler()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
Change jaro-winkler to cosine similarity
|
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.CosineSimilarity import ParallelCosineSimilarity
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
pcs = ParallelCosineSimilarity(self.event_attributes, self.unique_events_length)
self.distances = pcs.get_parallel_cosine_similarity()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
|
<commit_before>from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.JaroWinkler import JaroWinkler
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
jw = JaroWinkler(self.event_attributes, self.unique_events_length)
self.distances = jw.get_jarowinkler()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
<commit_msg>Change jaro-winkler to cosine similarity<commit_after>
|
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.CosineSimilarity import ParallelCosineSimilarity
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
pcs = ParallelCosineSimilarity(self.event_attributes, self.unique_events_length)
self.distances = pcs.get_parallel_cosine_similarity()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
|
from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.JaroWinkler import JaroWinkler
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
jw = JaroWinkler(self.event_attributes, self.unique_events_length)
self.distances = jw.get_jarowinkler()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
Change jaro-winkler to cosine similarityfrom pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.CosineSimilarity import ParallelCosineSimilarity
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
pcs = ParallelCosineSimilarity(self.event_attributes, self.unique_events_length)
self.distances = pcs.get_parallel_cosine_similarity()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
|
<commit_before>from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.JaroWinkler import JaroWinkler
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
jw = JaroWinkler(self.event_attributes, self.unique_events_length)
self.distances = jw.get_jarowinkler()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
<commit_msg>Change jaro-winkler to cosine similarity<commit_after>from pygraphc.preprocess.ParallelPreprocess import ParallelPreprocess
from pygraphc.similarity.CosineSimilarity import ParallelCosineSimilarity
from pygraphc.pruning.TrianglePruning import TrianglePruning
import networkx as nx
class CreateGraphModel(object):
def __init__(self, log_file):
self.log_file = log_file
self.unique_events = []
self.unique_events_length = 0
self.distances = []
self.graph = nx.MultiGraph()
def __get_nodes(self):
pp = ParallelPreprocess(self.log_file)
self.unique_events = pp.get_unique_events()
self.unique_events_length = pp.unique_events_length
self.event_attributes = pp.event_attributes
def __get_distances(self):
pcs = ParallelCosineSimilarity(self.event_attributes, self.unique_events_length)
self.distances = pcs.get_parallel_cosine_similarity()
def create_graph(self):
self.__get_nodes()
self.__get_distances()
self.graph.add_nodes_from(self.unique_events)
self.graph.add_weighted_edges_from(self.distances)
tp = TrianglePruning(self.graph)
tp.get_triangle()
self.graph = tp.graph
return self.graph
|
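For intuition about the measure being swapped in, a dependency-free sketch of cosine similarity over term-frequency vectors (the actual ParallelCosineSimilarity is not shown in this record):
import math
def cosine(a, b):
    # a and b map tokens to counts; identical vectors score 1.0,
    # disjoint ones 0.0.
    dot = sum(a.get(k, 0) * b.get(k, 0) for k in set(a) | set(b))
    na = math.sqrt(sum(v * v for v in a.values()))
    nb = math.sqrt(sum(v * v for v in b.values()))
    return dot / (na * nb) if na and nb else 0.0
print(cosine({'error': 2, 'disk': 1}, {'error': 1, 'full': 1}))  # ~0.63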
10047e427d38e2fef09b1f4b4d8ece567c811315
|
reqon/deprecated/__init__.py
|
reqon/deprecated/__init__.py
|
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(query, reql):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
Fix arguments order of reqon.deprecated.build_terms().
|
Fix arguments order of reqon.deprecated.build_terms().
|
Python
|
mit
|
dmpayton/reqon
|
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(query, reql):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
Fix arguments order of reqon.deprecated.build_terms().
|
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
<commit_before>import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(query, reql):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
<commit_msg>Fix arguments order of reqon.deprecated.build_terms().<commit_after>
|
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(query, reql):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
Fix arguments order of reqon.deprecated.build_terms().import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
<commit_before>import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(query, reql):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
<commit_msg>Fix arguments order of reqon.deprecated.build_terms().<commit_after>import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
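One thing to note when reading the diff: the signature flips to build_terms(reql, query), but query() as shown still calls build_terms(query['$query'], reql) in the old order. A call consistent with the new signature would be the excerpt below (same names as the record, shown as a fragment of query()):
# inside query(), after reql has been built:
return build_terms(reql, query['$query'])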
249638f69c82ed26a240b892bac85e7abe038151
|
run_tests.py
|
run_tests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=2).run(suite)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=1).run(suite)
|
Reduce the verbosity of the tests.
|
Reduce the verbosity of the tests.
Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com>
|
Python
|
apache-2.0
|
gorakhargosh/pyoauth,gorakhargosh/pyoauth
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=2).run(suite)
Reduce the verbosity of the tests.
Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=1).run(suite)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Reduce the verbosity of the tests.
Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com><commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=1).run(suite)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=2).run(suite)
Reduce the verbosity of the tests.
Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=1).run(suite)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Reduce the verbosity of the tests.
Signed-off-by: Gora Khargosh <a2078c57e3ac12c6dfb97b7c2c4e6d6d7db7e92f@gmail.com><commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import unittest2 as unittest
current_path = os.path.abspath(os.path.dirname(__file__))
tests_path = os.path.join(current_path, 'tests')
sys.path[0:0] = [
current_path,
tests_path,
]
all_tests = [f[:-3] for f in os.listdir(tests_path)
if f.startswith('test_') and f.endswith(".py")]
def get_suite(tests):
tests = sorted(tests)
suite = unittest.TestSuite()
loader = unittest.TestLoader()
for test in tests:
suite.addTest(loader.loadTestsFromName(test))
return suite
if __name__ == '__main__':
"""
To run all tests:
$ python run_tests.py
To run a single test:
$ python run_tests.py app
To run a couple of tests:
$ python run_tests.py app config sessions
To run code coverage:
$ coverage run run_tests.py
$ coverage report -m
"""
tests = sys.argv[1:]
if not tests:
tests = all_tests
tests = ['%s' % t for t in tests]
suite = get_suite(tests)
unittest.TextTestRunner(verbosity=1).run(suite)
|
687bb4e81e7223e9d380e75b36c46d36c142deea
|
sentiment.py
|
sentiment.py
|
from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient
from azure.common.credentials import UserPassCredentials
import cfg
c = cfg.read_cfg()
subscription_id = c['txtkey']
|
Add config file for subscriber ID.
|
Add config file for subscriber ID.
|
Python
|
mit
|
gnfrazier/textsentiment
|
Add config file for subscriber ID.
|
from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient
from azure.common.credentials import UserPassCredentials
import cfg
c = cfg.read_cfg()
subscription_id = c['txtkey']
|
<commit_before><commit_msg>Add config file for subscriber ID.<commit_after>
|
from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient
from azure.common.credentials import UserPassCredentials
import cfg
c = cfg.read_cfg()
subscription_id = c['txtkey']
|
Add config file for subscriber ID.from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient
from azure.common.credentials import UserPassCredentials
import cfg
c = cfg.read_cfg()
subscription_id = c['txtkey']
|
<commit_before><commit_msg>Add config file for subscriber ID.<commit_after>from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient
from azure.common.credentials import UserPassCredentials
import cfg
c = cfg.read_cfg()
subscription_id = c['txtkey']
|
|
19a58255f247199d0e60408cab8220a8c2a1ff3b
|
qxlc/minifier.py
|
qxlc/minifier.py
|
import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(text):
return Markup(htmlmin.minify(text.unescape(), remove_comments=True, remove_empty_space=True))
|
import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(s):
return Markup(htmlmin.minify(str(s), remove_comments=True, remove_empty_space=True))
|
Use str(s) instead of s.unescape() to add support for escaping things inside. (took me a while to find that str() worked)
|
Use str(s) instead of s.unescape() to add support for escaping things inside. (took me a while to find that str() worked)
|
Python
|
apache-2.0
|
daboross/qxlc,daboross/qxlc
|
import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(text):
return Markup(htmlmin.minify(text.unescape(), remove_comments=True, remove_empty_space=True))
Use str(s) instead of s.unescape() to add support for escaping things inside. (took me a while to find that str() worked)
|
import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(s):
return Markup(htmlmin.minify(str(s), remove_comments=True, remove_empty_space=True))
|
<commit_before>import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(text):
return Markup(htmlmin.minify(text.unescape(), remove_comments=True, remove_empty_space=True))
<commit_msg>Use str(s) instead of s.unescape() to add support for escaping things inside. (took me a while to find that str() worked)<commit_after>
|
import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(s):
return Markup(htmlmin.minify(str(s), remove_comments=True, remove_empty_space=True))
|
import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(text):
return Markup(htmlmin.minify(text.unescape(), remove_comments=True, remove_empty_space=True))
Use str(s) instead of s.unescape() to add support for escaping things inside. (took me a while to find that str() worked)import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(s):
return Markup(htmlmin.minify(str(s), remove_comments=True, remove_empty_space=True))
|
<commit_before>import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(text):
return Markup(htmlmin.minify(text.unescape(), remove_comments=True, remove_empty_space=True))
<commit_msg>Use str(s) instead of s.unescape() to add support for escaping things inside. (took me a while to find that str() worked)<commit_after>import htmlmin
from markupsafe import Markup
from qxlc import app
@app.template_filter("minify")
def minify_filter(s):
return Markup(htmlmin.minify(str(s), remove_comments=True, remove_empty_space=True))
|
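The minifier fix above hinges on how markupsafe stringifies its Markup type: str() on a Markup keeps the escaped entities intact, while unescape() reverses the escaping and would let previously-escaped content back through to htmlmin. A small runnable illustration using markupsafe's real escape/unescape API:

from markupsafe import Markup, escape

rendered = escape('<script>alert(1)</script>')  # returns a Markup with entities
print(str(rendered))        # <script>alert(1)</script> -- stays escaped
print(rendered.unescape())  # <script>alert(1)</script> -- escaping undone
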
fbae436ae2d9ee29b64f81331ee3b316b153f750
|
locksmith/common.py
|
locksmith/common.py
|
import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
Make get_signature support unicode characters by encoding to utf-8 instead of ascii.
|
Make get_signature support unicode characters by encoding to utf-8 instead of ascii.
|
Python
|
bsd-3-clause
|
sunlightlabs/django-locksmith,sunlightlabs/django-locksmith,sunlightlabs/django-locksmith
|
import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
Make get_signature support unicode characters by encoding to utf-8 instead of ascii.
|
import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
<commit_before>import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
<commit_msg>Make get_signature support unicode characters by encoding to utf-8 instead of ascii.<commit_after>
|
import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
Make get_signature support unicode characters by encoding to utf-8 instead of ascii.import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
<commit_before>import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
<commit_msg>Make get_signature support unicode characters by encoding to utf-8 instead of ascii.<commit_after>import hashlib
import hmac
import urllib, urllib2
KEY_STATUSES = (
('U', 'Unactivated'),
('A', 'Active'),
('S', 'Suspended')
)
UNPUBLISHED, PUBLISHED, NEEDS_UPDATE = range(3)
PUB_STATUSES = (
(UNPUBLISHED, 'Unpublished'),
(PUBLISHED, 'Published'),
(NEEDS_UPDATE, 'Needs Update'),
)
def get_signature(params, signkey):
# sorted k,v pairs of everything but signature
data = sorted([(k,v.encode('utf-8')) for k,v in params.iteritems() if k != 'signature'])
qs = urllib.urlencode(data)
return hmac.new(str(signkey), qs, hashlib.sha1).hexdigest()
def apicall(url, signkey, **params):
params['signature'] = get_signature(params, signkey)
data = sorted([(k,v) for k,v in params.iteritems()])
body = urllib.urlencode(data)
urllib2.urlopen(url, body)
|
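The locksmith change above works around Python 2's urllib.urlencode, which coerced values with str() and raised UnicodeEncodeError for non-ascii unicode; encoding each value to utf-8 first also makes the signed byte string deterministic. A Python 3 sketch of the same signing idea, assuming made-up parameter names and key (not locksmith's real ones):

import hashlib
import hmac
from urllib.parse import urlencode

def get_signature(params, signkey):
    # Encode values explicitly so the byte-level query string (and hence
    # the HMAC digest) is stable regardless of the caller's string types.
    data = sorted((k, v.encode('utf-8')) for k, v in params.items()
                  if k != 'signature')
    qs = urlencode(data)
    return hmac.new(signkey.encode('utf-8'), qs.encode('utf-8'),
                    hashlib.sha1).hexdigest()

print(get_signature({'apikey': 'abc', 'q': 'café'}, 'not-a-real-key'))
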
f4ea4ac658a5120bacf80e3c6ca86cf4afa794e0
|
src/encoded/commands/es_index_data.py
|
src/encoded/commands/es_index_data.py
|
from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
app = get_app(args.config_uri, args.app_name)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
|
from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
options = {
'embed_cache.capacity': '5000',
'indexer': 'true',
}
app = get_app(args.config_uri, args.app_name, options)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
|
Make es-index-data command use multiprocess indexing.
|
Make es-index-data command use multiprocess indexing.
|
Python
|
mit
|
ENCODE-DCC/snovault,ENCODE-DCC/snovault,hms-dbmi/fourfront,T2DREAM/t2dream-portal,T2DREAM/t2dream-portal,4dn-dcic/fourfront,ENCODE-DCC/snovault,ENCODE-DCC/snovault,T2DREAM/t2dream-portal,hms-dbmi/fourfront,4dn-dcic/fourfront,ENCODE-DCC/snovault,hms-dbmi/fourfront,ENCODE-DCC/encoded,ENCODE-DCC/encoded,4dn-dcic/fourfront,T2DREAM/t2dream-portal,4dn-dcic/fourfront,ENCODE-DCC/encoded,ENCODE-DCC/encoded,hms-dbmi/fourfront,hms-dbmi/fourfront
|
from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
app = get_app(args.config_uri, args.app_name)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
Make es-index-data command use multiprocess indexing.
|
from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
options = {
'embed_cache.capacity': '5000',
'indexer': 'true',
}
app = get_app(args.config_uri, args.app_name, options)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
|
<commit_before>from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
app = get_app(args.config_uri, args.app_name)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
<commit_msg>Make es-index-data command use multiprocess indexing.<commit_after>
|
from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
options = {
'embed_cache.capacity': '5000',
'indexer': 'true',
}
app = get_app(args.config_uri, args.app_name, options)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
|
from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
app = get_app(args.config_uri, args.app_name)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
Make es-index-data command use multiprocess indexing.from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
options = {
'embed_cache.capacity': '5000',
'indexer': 'true',
}
app = get_app(args.config_uri, args.app_name, options)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
|
<commit_before>from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
app = get_app(args.config_uri, args.app_name)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
<commit_msg>Make es-index-data command use multiprocess indexing.<commit_after>from pyramid.paster import get_app
import logging
from webtest import TestApp
index = 'encoded'
EPILOG = __doc__
def run(app, collections=None, record=False):
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': 'INDEXER',
}
testapp = TestApp(app, environ)
testapp.post_json('/index', {
'last_xmin': None,
'types': collections,
'recovery': True
}
)
def main():
''' Indexes app data loaded to elasticsearch '''
import argparse
parser = argparse.ArgumentParser(
description="Index data in Elastic Search", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--record', default=False, action='store_true', help="Record the xmin in ES meta")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
options = {
'embed_cache.capacity': '5000',
'indexer': 'true',
}
app = get_app(args.config_uri, args.app_name, options)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
return run(app, args.item_type, args.record)
if __name__ == '__main__':
main()
|
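In the es_index_data change above, the third argument to pyramid.paster.get_app is an options dict; as far as I can tell it is passed through as global_conf, so keys like indexer reach the app's settings and %()s interpolation in the .ini. A hedged sketch — the config path and app name here are placeholders, and it needs a real ini file to actually run:

from pyramid.paster import get_app

# Placeholder path/name; the option keys mirror the commit above.
options = {
    'embed_cache.capacity': '5000',  # larger embed cache for bulk indexing
    'indexer': 'true',               # enable the indexer code path
}
app = get_app('production.ini', 'main', options)
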
f180f75d97439b10e2325c1e85b88c0ecfb03e73
|
bmi_tester/tests/__init__.py
|
bmi_tester/tests/__init__.py
|
# Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
|
# Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
BMI_VERSION_STRING = '1.1'
|
Set default for BMI_VERSION_STRING to 1.1.
|
Set default for BMI_VERSION_STRING to 1.1.
|
Python
|
mit
|
csdms/bmi-tester
|
# Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
Set default for BMI_VERSION_STRING to 1.1.
|
# Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
BMI_VERSION_STRING = '1.1'
|
<commit_before># Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
<commit_msg>Set default for BMI_VERSION_STRING to 1.1.<commit_after>
|
# Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
BMI_VERSION_STRING = '1.1'
|
# Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
Set default for BMI_VERSION_STRING to 1.1.# Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
BMI_VERSION_STRING = '1.1'
|
<commit_before># Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
<commit_msg>Set default for BMI_VERSION_STRING to 1.1.<commit_after># Both of these variables should be overridden to test a particular
# BMI class
Bmi = None
INPUT_FILE = None
BMI_VERSION_STRING = '1.1'
|
f81e409ab1666a8a3bb1ff1806d256644712382f
|
structures/__init__.py
|
structures/__init__.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class StructureError(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise StructureError(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class UnknownStructure(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise UnknownStructure(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
|
Raise more appropriate UnknownStructure exception rather than StructureError if a structure is not found.
|
structures: Raise more appropriate UnknownStructure exception rather than StructureError if a structure is not found.
|
Python
|
cc0-1.0
|
jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class StructureError(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise StructureError(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
structures: Raise more appropriate UnknownStructure exception rather than StructureError if a structure is not found.
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class UnknownStructure(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise UnknownStructure(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class StructureError(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise StructureError(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
<commit_msg>structures: Raise more appropriate UnknownStructure exception rather than StructureError if a structure is not found.<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class UnknownStructure(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise UnknownStructure(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class StructureError(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise StructureError(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
structures: Raise more appropriate UnknownStructure exception rather than StructureError if a structure is not found.#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class UnknownStructure(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise UnknownStructure(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class StructureError(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise StructureError(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
<commit_msg>structures: Raise more appropriate UnknownStructure exception rather than StructureError if a structure is not found.<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from .base import DBStructure, _Generated
from .files.main import *
from .files.custom import *
from ..locales import L
class UnknownStructure(Exception):
pass
class StructureLoader():
wowfiles = None
@classmethod
def setup(cls):
if cls.wowfiles is None:
cls.wowfiles = {}
for name in globals():
try:
if not issubclass(globals()[name], DBStructure):
continue
except TypeError:
continue
cls.wowfiles[name.lower()] = globals()[name]
@classmethod
def getstructure(cls, name, build=0, parent=None):
if name in cls.wowfiles:
return cls.wowfiles[name](build, parent)
raise UnknownStructure(L["STRUCTURE_NOT_FOUND"] % name)
StructureLoader.setup()
getstructure = StructureLoader.getstructure
|
f8d551627781ea9568b97a426135bce74adf3adf
|
utils/helpers.py
|
utils/helpers.py
|
import boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the default security group."""
if cls._aws_account_id is None:
cls._aws_account_id = int(boto3.client('ec2').describe_security_groups(
GroupNames=['default'])['SecurityGroups'][0]['OwnerId'])
return cls._aws_account_id
|
import boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the caller identity."""
if cls._aws_account_id is None:
caller_data = boto3.client('sts').get_caller_identity()
cls._aws_account_id = caller_data['Arn'].split(':')[4]
return cls._aws_account_id
|
Use get_caller_identity instead of default SG to determine account id
|
Use get_caller_identity instead of default SG to determine account id
|
Python
|
mit
|
dliggat/local-lambda-toolkit
|
import boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the default security group."""
if cls._aws_account_id is None:
cls._aws_account_id = int(boto3.client('ec2').describe_security_groups(
GroupNames=['default'])['SecurityGroups'][0]['OwnerId'])
return cls._aws_account_id
Use get_caller_identity instead of default SG to determine account id
|
import boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the caller identity."""
if cls._aws_account_id is None:
caller_data = boto3.client('sts').get_caller_identity()
cls._aws_account_id = caller_data['Arn'].split(':')[4]
return cls._aws_account_id
|
<commit_before>import boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the default security group."""
if cls._aws_account_id is None:
cls._aws_account_id = int(boto3.client('ec2').describe_security_groups(
GroupNames=['default'])['SecurityGroups'][0]['OwnerId'])
return cls._aws_account_id
<commit_msg>Use get_caller_identity instead of default SG to determine account id<commit_after>
|
import boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the caller identity."""
if cls._aws_account_id is None:
caller_data = boto3.client('sts').get_caller_identity()
cls._aws_account_id = caller_data['Arn'].split(':')[4]
return cls._aws_account_id
|
import boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the default security group."""
if cls._aws_account_id is None:
cls._aws_account_id = int(boto3.client('ec2').describe_security_groups(
GroupNames=['default'])['SecurityGroups'][0]['OwnerId'])
return cls._aws_account_id
Use get_caller_identity instead of default SG to determine account idimport boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the caller identity."""
if cls._aws_account_id is None:
caller_data = boto3.client('sts').get_caller_identity()
cls._aws_account_id = caller_data['Arn'].split(':')[4]
return cls._aws_account_id
|
<commit_before>import boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the default security group."""
if cls._aws_account_id is None:
cls._aws_account_id = int(boto3.client('ec2').describe_security_groups(
GroupNames=['default'])['SecurityGroups'][0]['OwnerId'])
return cls._aws_account_id
<commit_msg>Use get_caller_identity instead of default SG to determine account id<commit_after>import boto3
class Helpers(object):
"""A container class for convenience functions."""
_aws_account_id = None
@classmethod
def aws_account_id(cls):
"""Query for the current account ID by inspecting the caller identity."""
if cls._aws_account_id is None:
caller_data = boto3.client('sts').get_caller_identity()
cls._aws_account_id = caller_data['Arn'].split(':')[4]
return cls._aws_account_id
|
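One design note on the helpers.py change above: the get_caller_identity response also carries an Account field directly, so the ARN split is avoidable. A sketch of both approaches — it needs AWS credentials to actually execute:

import boto3

caller = boto3.client('sts').get_caller_identity()
account_id = caller['Account']            # direct field from the STS response
from_arn = caller['Arn'].split(':')[4]    # what the commit above parses out
assert account_id == from_arn
print(account_id)
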
b5ee460f06fbbf12fcdf38a873a317854533c7ca
|
tools/vardict/split.py
|
tools/vardict/split.py
|
import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))])
start += chunk_size
|
import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))]))
start += chunk_size
|
Fix syntax error, missing ')'
|
Fix syntax error, missing ')'
|
Python
|
mit
|
blankenberg/tools-iuc,nsoranzo/tools-iuc,mvdbeek/tools-iuc,jj-umn/tools-iuc,gregvonkuster/tools-iuc,nekrut/tools-iuc,natefoo/tools-iuc,loraine-gueguen/tools-iuc,gregvonkuster/tools-iuc,natefoo/tools-iuc,Delphine-L/tools-iuc,galaxyproject/tools-iuc,mvdbeek/tools-iuc,loraine-gueguen/tools-iuc,jj-umn/tools-iuc,nekrut/tools-iuc,loraine-gueguen/tools-iuc,Delphine-L/tools-iuc,Delphine-L/tools-iuc,nsoranzo/tools-iuc,ieguinoa/tools-iuc,mvdbeek/tools-iuc,blankenberg/tools-iuc,galaxyproject/tools-iuc,nekrut/tools-iuc,nsoranzo/tools-iuc,nekrut/tools-iuc,pavanvidem/tools-iuc,loraine-gueguen/tools-iuc,natefoo/tools-iuc,pavanvidem/tools-iuc,galaxyproject/tools-iuc,galaxyproject/tools-iuc,Delphine-L/tools-iuc,nekrut/tools-iuc,blankenberg/tools-iuc,pavanvidem/tools-iuc,nsoranzo/tools-iuc,pavanvidem/tools-iuc,natefoo/tools-iuc,Delphine-L/tools-iuc,loraine-gueguen/tools-iuc,gregvonkuster/tools-iuc,nekrut/tools-iuc,ieguinoa/tools-iuc,pavanvidem/tools-iuc,nsoranzo/tools-iuc,gregvonkuster/tools-iuc,blankenberg/tools-iuc,Delphine-L/tools-iuc,Delphine-L/tools-iuc,pavanvidem/tools-iuc,mvdbeek/tools-iuc,gregvonkuster/tools-iuc,ieguinoa/tools-iuc,jj-umn/tools-iuc,galaxyproject/tools-iuc,natefoo/tools-iuc,ieguinoa/tools-iuc,natefoo/tools-iuc,nekrut/tools-iuc,jj-umn/tools-iuc,gregvonkuster/tools-iuc,loraine-gueguen/tools-iuc,jj-umn/tools-iuc,nsoranzo/tools-iuc,jj-umn/tools-iuc,mvdbeek/tools-iuc,ieguinoa/tools-iuc,ieguinoa/tools-iuc,blankenberg/tools-iuc,pavanvidem/tools-iuc,nsoranzo/tools-iuc,gregvonkuster/tools-iuc,loraine-gueguen/tools-iuc,natefoo/tools-iuc,mvdbeek/tools-iuc,galaxyproject/tools-iuc,blankenberg/tools-iuc,galaxyproject/tools-iuc,jj-umn/tools-iuc,blankenberg/tools-iuc,mvdbeek/tools-iuc,ieguinoa/tools-iuc
|
import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))])
start += chunk_size
Fix syntax error, missing ')'
|
import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))]))
start += chunk_size
|
<commit_before>import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))])
start += chunk_size
<commit_msg>Fix syntax error, missing ')'<commit_after>
|
import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))]))
start += chunk_size
|
import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))])
start += chunk_size
Fix syntax error, missing ')'import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))]))
start += chunk_size
|
<commit_before>import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))])
start += chunk_size
<commit_msg>Fix syntax error, missing ')'<commit_after>import sys
fai = sys.argv[1]
chunk_size = int(sys.argv[2])
overlap = 150 # Base pairs
with open(fai, 'r') as infile:
for line in infile:
name = line.split('\t')[0]
stop = int(line.split('\t')[1])
start = 1
while start < stop:
start = max(1, start - overlap)
print('\t'.join([name, str(start),
str(min(start + chunk_size, stop))]))
start += chunk_size
|
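The split.py loop above emits overlapping windows over each contig: every iteration first steps start back by overlap (clamped to 1) and then advances by chunk_size, so adjacent chunks share 150 bp. A self-contained sketch of the same loop using an in-memory record instead of a .fai file; the sizes are arbitrary:

# Same chunking logic as split.py, but fed from a literal instead of a file.
chunk_size = 1000
overlap = 150

def chunks(name, stop):
    start = 1
    while start < stop:
        start = max(1, start - overlap)  # step back to create the overlap
        yield (name, start, min(start + chunk_size, stop))
        start += chunk_size

for region in chunks('chr1', 2500):
    print('\t'.join(map(str, region)))
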
db2375425f9c125349f7ed5434175adb80c0ce95
|
students/utils.py
|
students/utils.py
|
from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
|
from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
def get_booked_robot(user):
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if has_booking or user.is_staff:
# TODO: To support multiple robots, we will need to pull in the robot_id from the booking
return RobotTerminal.objects.first()
|
Add utility function to get the robot that a given user currently has booked out (or None).
|
Add utility function to get the robot that a given user currently has booked out (or None).
|
Python
|
mit
|
muhummadPatel/raspied,muhummadPatel/raspied,muhummadPatel/raspied
|
from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
Add utility function to get the robot that a given user currently has booked out (or None).
|
from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
def get_booked_robot(user):
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if has_booking or user.is_staff:
# TODO: To support multiple robots, we will need to pull in the robot_id from the booking
return RobotTerminal.objects.first()
|
<commit_before>from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
<commit_msg>Add utility function to get the robot that a given user currently has booked out (or None).<commit_after>
|
from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
def get_booked_robot(user):
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if has_booking or user.is_staff:
# TODO: To support multiple robots, we will need to pull in the robot_id from the booking
return RobotTerminal.objects.first()
|
from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
Add utility function to get the robot that a given user currently has booked out (or None).from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
def get_booked_robot(user):
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if has_booking or user.is_staff:
# TODO: To support multiple robots, we will need to pull in the robot_id from the booking
return RobotTerminal.objects.first()
|
<commit_before>from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
<commit_msg>Add utility function to get the robot that a given user currently has booked out (or None).<commit_after>from datetime import datetime
from .exceptions import ClientError
from .models import Booking, RobotTerminal
def get_robot_terminal_or_error(robot_id, user):
# Check if the user is logged in
if not user.is_authenticated():
raise ClientError("USER_HAS_TO_LOGIN")
# get the robot_terminal with the given id
try:
robot = RobotTerminal.objects.get(pk=robot_id)
except RobotTerminal.DoesNotExist:
raise ClientError("ROBOT_INVALID")
# Check permissions
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if not (user.is_staff or has_booking):
raise ClientError("ROBOT_ACCESS_DENIED")
return robot
def get_booked_robot(user):
now = datetime.now()
has_booking = len(Booking.objects.filter(user=user, start_time__lte=now, end_time__gte=now)) > 0
if has_booking or user.is_staff:
# TODO: To support multiple robots, we will need to pull in the robot_id from the booking
return RobotTerminal.objects.first()
|
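The booking-window test that both helpers in the record above share can be exercised without Django. The sketch below is a hypothetical, framework-free re-creation: the Booking namedtuple and the in-memory BOOKINGS list stand in for the ORM query, and only the start_time <= now <= end_time check mirrors the commit.

from collections import namedtuple
from datetime import datetime, timedelta

# Hypothetical stand-ins for the Django model and queryset in the commit.
Booking = namedtuple('Booking', ['user', 'start_time', 'end_time'])
BOOKINGS = [
    Booking('alice', datetime.now() - timedelta(hours=1),
            datetime.now() + timedelta(hours=1)),
]

def has_active_booking(user, now=None):
    """Return True if `user` has a booking whose window contains `now`."""
    now = now or datetime.now()
    return any(booking.user == user and booking.start_time <= now <= booking.end_time
               for booking in BOOKINGS)

print(has_active_booking('alice'))  # True: the window spans the current time
print(has_active_booking('bob'))    # False: no booking at all

As a design note, the len(...) > 0 pattern in the commit materialises the whole queryset; a QuerySet .exists() call would express the same membership test without doing so.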
55e316a45256d054d19425015ef13868a84c5ff1
|
src/pip/_internal/resolution/resolvelib/reporter.py
|
src/pip/_internal/resolution/resolvelib/reporter.py
|
from collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
|
from collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
"To improve how pip performs, tell us that this happened here: "
"https://pip.pypa.io/surveys/backtracking"
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
|
Add the last line to the info message
|
Add the last line to the info message
|
Python
|
mit
|
sbidoul/pip,pradyunsg/pip,pypa/pip,pypa/pip,sbidoul/pip,pfmoore/pip,pfmoore/pip,pradyunsg/pip
|
from collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
Add the last line to the info message
|
from collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
"To improve how pip performs, tell us that this happened here: "
"https://pip.pypa.io/surveys/backtracking"
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
|
<commit_before>from collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
<commit_msg>Add the last line to the info message<commit_after>
|
from collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
"To improve how pip performs, tell us that this happened here: "
"https://pip.pypa.io/surveys/backtracking"
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
|
from collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
Add the last line to the info messagefrom collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
"To improve how pip performs, tell us that this happened here: "
"https://pip.pypa.io/surveys/backtracking"
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
|
<commit_before>from collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
<commit_msg>Add the last line to the info message<commit_after>from collections import defaultdict
from logging import getLogger
from pip._vendor.resolvelib.reporters import BaseReporter
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict
from .base import Candidate
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self):
# type: () -> None
self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int]
self._messages_at_backtrack = {
1: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
8: (
"pip is looking at multiple versions of this package to determine "
"which version is compatible with other requirements. "
"This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide the "
"dependency resolver with stricter constraints to reduce runtime."
"If you want to abort this run, you can press Ctrl + C to do so."
"To improve how pip performs, tell us that this happened here: "
"https://pip.pypa.io/surveys/backtracking"
)
}
def backtracking(self, candidate):
# type: (Candidate) -> None
self.backtracks_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
return
message = self._messages_at_backtrack[count]
logger.info("INFO: %s", message)
|
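The reporter in the record above is essentially a per-package counter plus a message table keyed by backtrack-count thresholds. A self-contained sketch of that pattern, without pip's vendored resolvelib, might look like the following; ToyReporter and its toy thresholds are illustrative assumptions, not pip API.

from collections import defaultdict
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class ToyReporter:
    """Illustrative stand-in for PipReporter: log a message the first time a
    package's backtrack count reaches one of the configured thresholds."""
    def __init__(self):
        self.backtracks_by_package = defaultdict(int)
        self.messages_at_backtrack = {
            1: 'starting to backtrack on this package',
            8: 'still backtracking; this could take a while',
            13: 'this is taking longer than usual',
        }

    def backtracking(self, name):
        self.backtracks_by_package[name] += 1
        message = self.messages_at_backtrack.get(self.backtracks_by_package[name])
        if message is not None:
            logger.info('INFO: %s', message)

reporter = ToyReporter()
for _ in range(13):
    reporter.backtracking('example-package')  # logs at counts 1, 8 and 13

Using dict.get here collapses the membership test and lookup that the original performs in two steps; the behaviour is the same.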
fb65fedbf60481d37e097ea9db290f53b84cae26
|
giveaminute/migrations/versions/001_Initial_models.py
|
giveaminute/migrations/versions/001_Initial_models.py
|
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
sql = initial_file.read()
migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
|
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
# Uncomment the following lines if you do not yet have a database to set up.
# If you run this migration, it will blow away the data currently contained
# in your database and start new.
#
# with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
# sql = initial_file.read()
# migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
|
Comment out the initial migration step by default (so that we're not inadvertently blowing peoples databases away
|
Comment out the initial migration step by default (so that we're not inadvertently blowing peoples databases away
|
Python
|
agpl-3.0
|
codeforamerica/Change-By-Us,localprojects/Change-By-Us,watchcat/cbu-rotterdam,watchcat/cbu-rotterdam,localprojects/Change-By-Us,codeforeurope/Change-By-Us,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,watchcat/cbu-rotterdam,localprojects/Change-By-Us,localprojects/Change-By-Us,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,codeforamerica/Change-By-Us,codeforeurope/Change-By-Us
|
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
sql = initial_file.read()
migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
Comment out the initial migration step by default (so that we're not inadvertently blowing peoples databases away
|
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
# Uncomment the following lines if you do not yet have a database to set up.
# If you run this migration, it will blow away the data currently contained
# in your database and start new.
#
# with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
# sql = initial_file.read()
# migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
|
<commit_before>from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
sql = initial_file.read()
migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
<commit_msg>Comment out the initial migration step by default (so that we're not inadvertently blowing peoples databases away<commit_after>
|
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
# Uncomment the following lines if you do not yet have a database to set up.
# If you run this migration, it will blow away the data currently contained
# in your database and start new.
#
# with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
# sql = initial_file.read()
# migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
|
from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
sql = initial_file.read()
migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
Comment out the initial migration step by default (so that we're not inadvertently blowing peoples databases awayfrom sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
# Uncomment the following lines if you do not yet have a database to set up.
# If you run this migration, it will blow away the data currently contained
# in your database and start new.
#
# with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
# sql = initial_file.read()
# migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
|
<commit_before>from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
sql = initial_file.read()
migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
<commit_msg>Comment out the initial migration step by default (so that we're not inadvertently blowing peoples databases away<commit_after>from sqlalchemy import *
from migrate import *
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
import os
# Uncomment the following lines if you do not yet have a database to set up.
# If you run this migration, it will blow away the data currently contained
# in your database and start new.
#
# with open(os.path.join(os.path.dirname(__file__), '000_Initial_models.sql')) as initial_file:
# sql = initial_file.read()
# migrate_engine.execute(sql)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
pass
|
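The pattern the migration above uses, reading a sibling .sql file and executing it as the upgrade step, can be sketched without sqlalchemy-migrate. The helper below substitutes the standard-library sqlite3 module purely for illustration; the default file name matches the migration, but everything else is an assumption.

import os
import sqlite3

def run_bootstrap_sql(db_path, sql_name='000_Initial_models.sql'):
    """Execute a sibling SQL script against `db_path`. This is destructive if
    the script drops and recreates tables, which is why the commit above
    leaves the equivalent step commented out by default."""
    sql_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), sql_name)
    with open(sql_path) as initial_file:
        sql = initial_file.read()
    with sqlite3.connect(db_path) as conn:
        conn.executescript(sql)  # executescript accepts multi-statement SQL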
a0c9e2d6d5115aba04a650281b10d47e31873671
|
tensorflow/contrib/distributions/python/__init__.py
|
tensorflow/contrib/distributions/python/__init__.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ops module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
Fix futures test Change: 115766190
|
Fix futures test
Change: 115766190
|
Python
|
apache-2.0
|
mortada/tensorflow,maciekcc/tensorflow,yaroslavvb/tensorflow,AnishShah/tensorflow,pcm17/tensorflow,eaplatanios/tensorflow,arborh/tensorflow,tornadozou/tensorflow,horance-liu/tensorflow,Intel-Corporation/tensorflow,tntnatbry/tensorflow,gojira/tensorflow,brchiu/tensorflow,nburn42/tensorflow,tillahoffmann/tensorflow,horance-liu/tensorflow,jwlawson/tensorflow,Carmezim/tensorflow,ravindrapanda/tensorflow,alheinecke/tensorflow-xsmm,davidzchen/tensorflow,kamcpp/tensorflow,alshedivat/tensorflow,karllessard/tensorflow,yanchen036/tensorflow,JingJunYin/tensorflow,nolanliou/tensorflow,tongwang01/tensorflow,xodus7/tensorflow,jbedorf/tensorflow,dancingdan/tensorflow,raymondxyang/tensorflow,adit-chandra/tensorflow,LUTAN/tensorflow,neilhan/tensorflow,awni/tensorflow,allenlavoie/tensorflow,drpngx/tensorflow,awni/tensorflow,gojira/tensorflow,chris-chris/tensorflow,whn09/tensorflow,nikste/tensorflow,gautam1858/tensorflow,kobejean/tensorflow,jeffzheng1/tensorflow,haeusser/tensorflow,naturali/tensorflow,chemelnucfin/tensorflow,awni/tensorflow,jbedorf/tensorflow,ZhangXinNan/tensorflow,hfp/tensorflow-xsmm,panmari/tensorflow,taknevski/tensorflow-xsmm,dongjoon-hyun/tensorflow,jbedorf/tensorflow,benoitsteiner/tensorflow-xsmm,kobejean/tensorflow,jwlawson/tensorflow,jhseu/tensorflow,anand-c-goog/tensorflow,renyi533/tensorflow,wangyum/tensorflow,Bismarrck/tensorflow,strint/tensorflow,HaebinShin/tensorflow,sjperkins/tensorflow,LUTAN/tensorflow,elingg/tensorflow,alivecor/tensorflow,anilmuthineni/tensorflow,ville-k/tensorflow,a-doumoulakis/tensorflow,scenarios/tensorflow,DavidNorman/tensorflow,ibab/tensorflow,pavelchristof/gomoku-ai,alisidd/tensorflow,handroissuazo/tensorflow,dyoung418/tensorflow,peterbraden/tensorflow,gunan/tensorflow,yongtang/tensorflow,jendap/tensorflow,nanditav/15712-TensorFlow,jendap/tensorflow,Carmezim/tensorflow,meteorcloudy/tensorflow,eaplatanios/tensorflow,meteorcloudy/tensorflow,markslwong/tensorflow,johndpope/tensorflow,kchodorow/tensorflow,xodus7/tensorflow,taknevski/tensorflow-xsmm,benoitsteiner/tensorflow,xzturn/tensorflow,mrry/tensorflow,with-git/tensorflow,Mistobaan/tensorflow,tensorflow/tensorflow,ravindrapanda/tensorflow,cxxgtxy/tensorflow,adit-chandra/tensorflow,handroissuazo/tensorflow,sarvex/tensorflow,guschmue/tensorflow,nanditav/15712-TensorFlow,apark263/tensorflow,cxxgtxy/tensorflow,benoitsteiner/tensorflow,wangyum/tensorflow,sandeepgupta2k4/tensorflow,davidzchen/tensorflow,adit-chandra/tensorflow,gibiansky/tensorflow,sandeepdsouza93/TensorFlow-15712,scenarios/tensorflow,Moriadry/tensorflow,ZhangXinNan/tensorflow,lukeiwanski/tensorflow,neilhan/tensorflow,DavidNorman/tensorflow,gojira/tensorflow,hfp/tensorflow-xsmm,apark263/tensorflow,laszlocsomor/tensorflow,scenarios/tensorflow,mixturemodel-flow/tensorflow,eerwitt/tensorflow,pcm17/tensorflow,alshedivat/tensorflow,alivecor/tensorflow,seaotterman/tensorflow,vrv/tensorflow,asimshankar/tensorflow,annarev/tensorflow,manjunaths/tensorflow,chenjun0210/tensorflow,mavenlin/tensorflow,benoitsteiner/tensorflow,adit-chandra/tensorflow,drpngx/tensorflow,Mazecreator/tensorflow,gnieboer/tensorflow,nightjean/Deep-Learning,awni/tensorflow,whn09/tensorflow,aselle/tensorflow,strint/tensorflow,alheinecke/tensorflow-xsmm,chris-chris/tensorflow,petewarden/tensorflow,ppwwyyxx/tensorflow,vrv/tensorflow,codrut3/tensorflow,kobejean/tensorflow,alshedivat/tensorflow,DavidNorman/tensorflow,rabipanda/tensorflow,meteorcloudy/tensorflow,tornadozou/tensorflow,naturali/tensorflow,jart/tensorflow,manjunaths/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,adit-chandra/tensorflow,horance-liu/tensorflow,sandeepdsouza93/TensorFlow-15712,RapidApplicationDevelopment/tensorflow,sandeepgupta2k4/tensorflow,Xeralux/tensorflow,cg31/tensorflow,petewarden/tensorflow_makefile,chemelnucfin/tensorflow,aam-at/tensorflow,paolodedios/tensorflow,alsrgv/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tiagofrepereira2012/tensorflow,alshedivat/tensorflow,MoamerEncsConcordiaCa/tensorflow,gibiansky/tensorflow,jostep/tensorflow,meteorcloudy/tensorflow,frreiss/tensorflow-fred,laszlocsomor/tensorflow,chenjun0210/tensorflow,raymondxyang/tensorflow,rabipanda/tensorflow,Mistobaan/tensorflow,nikste/tensorflow,brchiu/tensorflow,XueqingLin/tensorflow,gautam1858/tensorflow,adit-chandra/tensorflow,jalexvig/tensorflow,xzturn/tensorflow,meteorcloudy/tensorflow,laosiaudi/tensorflow,awni/tensorflow,jostep/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,dhalleine/tensorflow,nightjean/Deep-Learning,caisq/tensorflow,tomasreimers/tensorflow-emscripten,aam-at/tensorflow,vrv/tensorflow,annarev/tensorflow,alshedivat/tensorflow,alsrgv/tensorflow,thesuperzapper/tensorflow,calebfoss/tensorflow,martinwicke/tensorflow,llhe/tensorflow,ville-k/tensorflow,Intel-tensorflow/tensorflow,admcrae/tensorflow,SnakeJenny/TensorFlow,pavelchristof/gomoku-ai,asimshankar/tensorflow,jhaux/tensorflow,Kongsea/tensorflow,xzturn/tensorflow,code-sauce/tensorflow,unsiloai/syntaxnet-ops-hack,anand-c-goog/tensorflow,pavelchristof/gomoku-ai,haeusser/tensorflow,chemelnucfin/tensorflow,alheinecke/tensorflow-xsmm,seanli9jan/tensorflow,RapidApplicationDevelopment/tensorflow,awni/tensorflow,gnieboer/tensorflow,HKUST-SING/tensorflow,rabipanda/tensorflow,zycdragonball/tensorflow,nolanliou/tensorflow,tongwang01/tensorflow,zycdragonball/tensorflow,lakshayg/tensorflow,adit-chandra/tensorflow,frreiss/tensorflow-fred,jbedorf/tensorflow,benoitsteiner/tensorflow-xsmm,apark263/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,suiyuan2009/tensorflow,tntnatbry/tensorflow,jhseu/tensorflow,lakshayg/tensorflow,seaotterman/tensorflow,sjperkins/tensorflow,eaplatanios/tensorflow,kevin-coder/tensorflow-fork,gojira/tensorflow,maciekcc/tensorflow,suiyuan2009/tensorflow,strint/tensorflow,Kongsea/tensorflow,ZhangXinNan/tensorflow,unsiloai/syntaxnet-ops-hack,jbedorf/tensorflow,peterbraden/tensorflow,paolodedios/tensorflow,haeusser/tensorflow,drpngx/tensorflow,av8ramit/tensorflow,HaebinShin/tensorflow,hsaputra/tensorflow,kchodorow/tensorflow,Mazecreator/tensorflow,juharris/tensorflow,paolodedios/tensorflow,thesuperzapper/tensorflow,mavenlin/tensorflow,zasdfgbnm/tensorflow,anand-c-goog/tensorflow,cg31/tensorflow,DavidNorman/tensorflow,adamtiger/tensorflow,mavenlin/tensorflow,ran5515/DeepDecision,allenlavoie/tensorflow,alsrgv/tensorflow,johndpope/tensorflow,tensorflow/tensorflow-pywrap_saved_model,SnakeJenny/TensorFlow,tensorflow/tensorflow-pywrap_saved_model,with-git/tensorflow,alisidd/tensorflow,alshedivat/tensorflow,alheinecke/tensorflow-xsmm,mengxn/tensorflow,nolanliou/tensorflow,freedomtan/tensorflow,chris-chris/tensorflow,gnieboer/tensorflow,martinwicke/tensorflow,rdipietro/tensorflow,MycChiu/tensorflow,markslwong/tensorflow,aselle/tensorflow,theflofly/tensorflow,code-sauce/tensorflow,llhe/tensorflow,allenlavoie/tensorflow,wchan/tensorflow,nolanliou/tensorflow,manjunaths/tensorflow,rdipietro/tensorflow,theflofly/tensorflow,zycdragonball/tensorflow,calebfoss/tensorflow,lukeiwanski/tensorflow-opencl,naturali/tensorflow,TakayukiSakai/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Carmezim/tensorflow,ninotoshi/tensorflow,Mistobaan/tensorflow,admcrae/tensorflow,jart/tensorflow,Moriadry/tensorflow,zycdragonball/tensorflow,ninotoshi/tensorflow,hehongliang/tensorflow,elingg/tensorflow,dongjoon-hyun/tensorflow,ninotoshi/tensorflow,calebfoss/tensorflow,jhseu/tensorflow,girving/tensorflow,wchan/tensorflow,whn09/tensorflow,Bulochkin/tensorflow_pack,alivecor/tensorflow,tillahoffmann/tensorflow,Bismarrck/tensorflow,drpngx/tensorflow,paolodedios/tensorflow,asimshankar/tensorflow,kevin-coder/tensorflow-fork,johndpope/tensorflow,sjperkins/tensorflow,vrv/tensorflow,MycChiu/tensorflow,jwlawson/tensorflow,jalexvig/tensorflow,mrry/tensorflow,Intel-tensorflow/tensorflow,sandeepdsouza93/TensorFlow-15712,raymondxyang/tensorflow,ishay2b/tensorflow,annarev/tensorflow,jhseu/tensorflow,dancingdan/tensorflow,MostafaGazar/tensorflow,XueqingLin/tensorflow,gibiansky/tensorflow,karllessard/tensorflow,jwlawson/tensorflow,neilhan/tensorflow,manazhao/tf_recsys,krikru/tensorflow-opencl,guschmue/tensorflow,wchan/tensorflow,johndpope/tensorflow,jhseu/tensorflow,unsiloai/syntaxnet-ops-hack,drpngx/tensorflow,AnishShah/tensorflow,ychfan/tensorflow,tornadozou/tensorflow,nightjean/Deep-Learning,tensorflow/tensorflow,alshedivat/tensorflow,tomasreimers/tensorflow-emscripten,hfp/tensorflow-xsmm,DCSaunders/tensorflow,annarev/tensorflow,chris-chris/tensorflow,adit-chandra/tensorflow,chris-chris/tensorflow,alivecor/tensorflow,unsiloai/syntaxnet-ops-hack,gnieboer/tensorflow,dongjoon-hyun/tensorflow,peterbraden/tensorflow,Mazecreator/tensorflow,neilhan/tensorflow,mixturemodel-flow/tensorflow,lukeiwanski/tensorflow-opencl,alshedivat/tensorflow,handroissuazo/tensorflow,ghchinoy/tensorflow,kamcpp/tensorflow,Xeralux/tensorflow,caisq/tensorflow,snnn/tensorflow,mrry/tensorflow,tensorflow/tensorflow,mrry/tensorflow,tiagofrepereira2012/tensorflow,caisq/tensorflow,guschmue/tensorflow,yufengg/tensorflow,MoamerEncsConcordiaCa/tensorflow,jalexvig/tensorflow,neilhan/tensorflow,admcrae/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,jhseu/tensorflow,mdrumond/tensorflow,chris-chris/tensorflow,arborh/tensorflow,martinwicke/tensorflow,memo/tensorflow,seaotterman/tensorflow,whn09/tensorflow,tensorflow/tensorflow-pywrap_saved_model,manjunaths/tensorflow,wangyum/tensorflow,HaebinShin/tensorflow,jwlawson/tensorflow,ibmsoe/tensorflow,aam-at/tensorflow,elingg/tensorflow,Xeralux/tensorflow,av8ramit/tensorflow,Intel-Corporation/tensorflow,lukeiwanski/tensorflow-opencl,codrut3/tensorflow,SnakeJenny/TensorFlow,dhalleine/tensorflow,anand-c-goog/tensorflow,ychfan/tensorflow,arborh/tensorflow,seanli9jan/tensorflow,AndreasMadsen/tensorflow,tensorflow/tensorflow-pywrap_saved_model,dancingdan/tensorflow,johndpope/tensorflow,arborh/tensorflow,petewarden/tensorflow_makefile,memo/tensorflow,lukeiwanski/tensorflow,alisidd/tensorflow,chenjun0210/tensorflow,lukeiwanski/tensorflow-opencl,tntnatbry/tensorflow,asimshankar/tensorflow,jbedorf/tensorflow,anand-c-goog/tensorflow,zycdragonball/tensorflow,DCSaunders/tensorflow,petewarden/tensorflow_makefile,ibab/tensorflow,annarev/tensorflow,jhaux/tensorflow,thesuperzapper/tensorflow,dendisuhubdy/tensorflow,benoitsteiner/tensorflow,jbedorf/tensorflow,rabipanda/tensorflow,brchiu/tensorflow,renyi533/tensorflow,petewarden/tensorflow_makefile,kchodorow/tensorflow,thjashin/tensorflow,ran5515/DeepDecision,abhitopia/tensorflow,ageron/tensorflow,calebfoss/tensorflow,yongtang/tensorflow,arborh/tensorflow,mortada/tensorflow,gunan/tensorflow,nburn42/tensorflow,juharris/tensorflow,naturali/tensorflow,eaplatanios/tensorflow,gunan/tensorflow,kamcpp/tensorflow,thjashin/tensorflow,ivano666/tensorflow,ychfan/tensorflow,juharris/tensorflow,rdipietro/tensorflow,cancan101/tensorflow,paolodedios/tensorflow,jhseu/tensorflow,panmari/tensorflow,whn09/tensorflow,JingJunYin/tensorflow,hfp/tensorflow-xsmm,ishay2b/tensorflow,av8ramit/tensorflow,MostafaGazar/tensorflow,yongtang/tensorflow,cancan101/tensorflow,ageron/tensorflow,jalexvig/tensorflow,Moriadry/tensorflow,dhalleine/tensorflow,thjashin/tensorflow,yaroslavvb/tensorflow,odejesush/tensorflow,kevin-coder/tensorflow-fork,cg31/tensorflow,yanchen036/tensorflow,alsrgv/tensorflow,dhalleine/tensorflow,kevin-coder/tensorflow-fork,laosiaudi/tensorflow,nburn42/tensorflow,ArtsiomCh/tensorflow,markslwong/tensorflow,aldian/tensorflow,nightjean/Deep-Learning,ageron/tensorflow,krikru/tensorflow-opencl,neilhan/tensorflow,jeffzheng1/tensorflow,cancan101/tensorflow,laszlocsomor/tensorflow,seaotterman/tensorflow,SnakeJenny/TensorFlow,ibmsoe/tensorflow,ArtsiomCh/tensorflow,markslwong/tensorflow,meteorcloudy/tensorflow,laszlocsomor/tensorflow,dendisuhubdy/tensorflow,zasdfgbnm/tensorflow,theflofly/tensorflow,eaplatanios/tensorflow,laosiaudi/tensorflow,MostafaGazar/tensorflow,a-doumoulakis/tensorflow,petewarden/tensorflow,av8ramit/tensorflow,cancan101/tensorflow,Intel-Corporation/tensorflow,alisidd/tensorflow,MoamerEncsConcordiaCa/tensorflow,jhaux/tensorflow,ivano666/tensorflow,hsaputra/tensorflow,aam-at/tensorflow,DCSaunders/tensorflow,allenlavoie/tensorflow,av8ramit/tensorflow,MycChiu/tensorflow,theflofly/tensorflow,tomasreimers/tensorflow-emscripten,rdipietro/tensorflow,maciekcc/tensorflow,tomasreimers/tensorflow-emscripten,jhaux/tensorflow,martinwicke/tensorflow,xodus7/tensorflow,EvenStrangest/tensorflow,ZhangXinNan/tensorflow,llhe/tensorflow,kchodorow/tensorflow,manipopopo/tensorflow,mengxn/tensorflow,handroissuazo/tensorflow,DCSaunders/tensorflow,ghchinoy/tensorflow,karllessard/tensorflow,ageron/tensorflow,ArtsiomCh/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,sjperkins/tensorflow,Intel-tensorflow/tensorflow,jendap/tensorflow,petewarden/tensorflow,admcrae/tensorflow,pavelchristof/gomoku-ai,krikru/tensorflow-opencl,RapidApplicationDevelopment/tensorflow,XueqingLin/tensorflow,dongjoon-hyun/tensorflow,EvenStrangest/tensorflow,arborh/tensorflow,petewarden/tensorflow_makefile,AndreasMadsen/tensorflow,ravindrapanda/tensorflow,HaebinShin/tensorflow,mdrumond/tensorflow,jwlawson/tensorflow,frreiss/tensorflow-fred,ppwwyyxx/tensorflow,allenlavoie/tensorflow,strint/tensorflow,yaroslavvb/tensorflow,pavelchristof/gomoku-ai,petewarden/tensorflow,nanditav/15712-TensorFlow,tntnatbry/tensorflow,freedomtan/tensorflow,a-doumoulakis/tensorflow,annarev/tensorflow,gautam1858/tensorflow,dongjoon-hyun/tensorflow,andrewcmyers/tensorflow,renyi533/tensorflow,renyi533/tensorflow,nikste/tensorflow,aselle/tensorflow,jbedorf/tensorflow,snnn/tensorflow,dendisuhubdy/tensorflow,thjashin/tensorflow,manipopopo/tensorflow,girving/tensorflow,nolanliou/tensorflow,theflofly/tensorflow,dancingdan/tensorflow,JVillella/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Xeralux/tensorflow,alsrgv/tensorflow,with-git/tensorflow,caisq/tensorflow,wchan/tensorflow,ran5515/DeepDecision,jeffzheng1/tensorflow,ageron/tensorflow,dyoung418/tensorflow,ychfan/tensorflow,paolodedios/tensorflow,ravindrapanda/tensorflow,gojira/tensorflow,adamtiger/tensorflow,gojira/tensorflow,eerwitt/tensorflow,benoitsteiner/tensorflow,a-doumoulakis/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,anilmuthineni/tensorflow,sandeepdsouza93/TensorFlow-15712,HKUST-SING/tensorflow,zycdragonball/tensorflow,thjashin/tensorflow,ychfan/tensorflow,chenjun0210/tensorflow,laszlocsomor/tensorflow,dyoung418/tensorflow,wangyum/tensorflow,bowang/tensorflow,tillahoffmann/tensorflow,suiyuan2009/tensorflow,petewarden/tensorflow,neilhan/tensorflow,elingg/tensorflow,anand-c-goog/tensorflow,thjashin/tensorflow,martinbede/second-sight,admcrae/tensorflow,gnieboer/tensorflow,nburn42/tensorflow,martinwicke/tensorflow,frreiss/tensorflow-fred,horance-liu/tensorflow,pcm17/tensorflow,benoitsteiner/tensorflow-opencl,frreiss/tensorflow-fred,hfp/tensorflow-xsmm,yufengg/tensorflow,wchan/tensorflow,andrewcmyers/tensorflow,Intel-Corporation/tensorflow,ppwwyyxx/tensorflow,MycChiu/tensorflow,MoamerEncsConcordiaCa/tensorflow,peterbraden/tensorflow,odejesush/tensorflow,ArtsiomCh/tensorflow,Bismarrck/tensorflow,Intel-tensorflow/tensorflow,jalexvig/tensorflow,nikste/tensorflow,xzturn/tensorflow,tongwang01/tensorflow,jostep/tensorflow,TakayukiSakai/tensorflow,alistairlow/tensorflow,DavidNorman/tensorflow,alshedivat/tensorflow,Bulochkin/tensorflow_pack,karllessard/tensorflow,LUTAN/tensorflow,thjashin/tensorflow,martinbede/second-sight,manazhao/tf_recsys,kobejean/tensorflow,mrry/tensorflow,arborh/tensorflow,jhaux/tensorflow,hsaputra/tensorflow,snnn/tensorflow,XueqingLin/tensorflow,lukeiwanski/tensorflow,haeusser/tensorflow,alsrgv/tensorflow,benoitsteiner/tensorflow,tiagofrepereira2012/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,davidzchen/tensorflow,xodus7/tensorflow,theflofly/tensorflow,Mistobaan/tensorflow,aam-at/tensorflow,elingg/tensorflow,dendisuhubdy/tensorflow,RapidApplicationDevelopment/tensorflow,rdipietro/tensorflow,caisq/tensorflow,jwlawson/tensorflow,pcm17/tensorflow,chris-chris/tensorflow,Bulochkin/tensorflow_pack,tensorflow/tensorflow-experimental_link_static_libraries_once,seanli9jan/tensorflow,ville-k/tensorflow,benoitsteiner/tensorflow-xsmm,caisq/tensorflow,gunan/tensorflow,petewarden/tensorflow_makefile,elingg/tensorflow,laosiaudi/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Bismarrck/tensorflow,JingJunYin/tensorflow,mixturemodel-flow/tensorflow,Carmezim/tensorflow,anilmuthineni/tensorflow,dendisuhubdy/tensorflow,odejesush/tensorflow,brchiu/tensorflow,dancingdan/tensorflow,aam-at/tensorflow,odejesush/tensorflow,guschmue/tensorflow,gojira/tensorflow,eadgarchen/tensorflow,ageron/tensorflow,mixturemodel-flow/tensorflow,ravindrapanda/tensorflow,handroissuazo/tensorflow,guschmue/tensorflow,Mistobaan/tensorflow,av8ramit/tensorflow,yanchen036/tensorflow,ibab/tensorflow,abhitopia/tensorflow,TakayukiSakai/tensorflow,ppries/tensorflow,alistairlow/tensorflow,hehongliang/tensorflow,yufengg/tensorflow,markslwong/tensorflow,DCSaunders/tensorflow,andrewcmyers/tensorflow,Bismarrck/tensorflow,jostep/tensorflow,eadgarchen/tensorflow,manazhao/tf_recsys,sandeepgupta2k4/tensorflow,codrut3/tensorflow,xodus7/tensorflow,DavidNorman/tensorflow,kchodorow/tensorflow,theflofly/tensorflow,ibmsoe/tensorflow,laszlocsomor/tensorflow,Mazecreator/tensorflow,ageron/tensorflow,asadziach/tensorflow,ppries/tensorflow,Mazecreator/tensorflow,mengxn/tensorflow,haeusser/tensorflow,paolodedios/tensorflow,Kongsea/tensorflow,cancan101/tensorflow,asimshankar/tensorflow,MostafaGazar/tensorflow,apark263/tensorflow,lukeiwanski/tensorflow,AnishShah/tensorflow,Bismarrck/tensorflow,jendap/tensorflow,manipopopo/tensorflow,ninotoshi/tensorflow,ppries/tensorflow,ville-k/tensorflow,aselle/tensorflow,wangyum/tensorflow,aldian/tensorflow,ArtsiomCh/tensorflow,snnn/tensorflow,hehongliang/tensorflow,gautam1858/tensorflow,av8ramit/tensorflow,moonboots/tensorflow,tntnatbry/tensorflow,Bismarrck/tensorflow,krikru/tensorflow-opencl,lakshayg/tensorflow,ZhangXinNan/tensorflow,gunan/tensorflow,benoitsteiner/tensorflow-xsmm,gibiansky/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,sjperkins/tensorflow,jendap/tensorflow,Bulochkin/tensorflow_pack,jbedorf/tensorflow,ibab/tensorflow,brchiu/tensorflow,nburn42/tensorflow,RapidApplicationDevelopment/tensorflow,markslwong/tensorflow,mrry/tensorflow,AndreasMadsen/tensorflow,tensorflow/tensorflow,Bismarrck/tensorflow,taknevski/tensorflow-xsmm,MoamerEncsConcordiaCa/tensorflow,anilmuthineni/tensorflow,zycdragonball/tensorflow,scenarios/tensorflow,dancingdan/tensorflow,JVillella/tensorflow,brchiu/tensorflow,chemelnucfin/tensorflow,Xeralux/tensorflow,sandeepgupta2k4/tensorflow,kevin-coder/tensorflow-fork,kamcpp/tensorflow,kobejean/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,nanditav/15712-TensorFlow,abhitopia/tensorflow,jwlawson/tensorflow,tongwang01/tensorflow,abhitopia/tensorflow,code-sauce/tensorflow,allenlavoie/tensorflow,aselle/tensorflow,frreiss/tensorflow-fred,ageron/tensorflow,chemelnucfin/tensorflow,martinbede/second-sight,Mistobaan/tensorflow,benoitsteiner/tensorflow,handroissuazo/tensorflow,jwlawson/tensorflow,martinwicke/tensorflow,Intel-Corporation/tensorflow,girving/tensorflow,snnn/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,taknevski/tensorflow-xsmm,hehongliang/tensorflow,gunan/tensorflow,4Quant/tensorflow,aldian/tensorflow,cg31/tensorflow,jendap/tensorflow,asadziach/tensorflow,andrewcmyers/tensorflow,benoitsteiner/tensorflow-xsmm,mdrumond/tensorflow,gunan/tensorflow,davidzchen/tensorflow,kobejean/tensorflow,AndreasMadsen/tensorflow,alisidd/tensorflow,horance-liu/tensorflow,allenlavoie/tensorflow,apark263/tensorflow,krikru/tensorflow-opencl,nightjean/Deep-Learning,cancan101/tensorflow,Kongsea/tensorflow,alivecor/tensorflow,jhaux/tensorflow,nikste/tensorflow,aam-at/tensorflow,ishay2b/tensorflow,AnishShah/tensorflow,adamtiger/tensorflow,nolanliou/tensorflow,annarev/tensorflow,nolanliou/tensorflow,Carmezim/tensorflow,jart/tensorflow,dyoung418/tensorflow,RapidApplicationDevelopment/tensorflow,dancingdan/tensorflow,maciekcc/tensorflow,tntnatbry/tensorflow,cancan101/tensorflow,Bulochkin/tensorflow_pack,maciekcc/tensorflow,petewarden/tensorflow,hsaputra/tensorflow,whn09/tensorflow,tntnatbry/tensorflow,calebfoss/tensorflow,MycChiu/tensorflow,tensorflow/tensorflow,adit-chandra/tensorflow,LUTAN/tensorflow,dongjoon-hyun/tensorflow,llhe/tensorflow,handroissuazo/tensorflow,sandeepdsouza93/TensorFlow-15712,girving/tensorflow,ivano666/tensorflow,benoitsteiner/tensorflow-opencl,yufengg/tensorflow,chemelnucfin/tensorflow,cg31/tensorflow,wchan/tensorflow,vrv/tensorflow,jhseu/tensorflow,ghchinoy/tensorflow,admcrae/tensorflow,guschmue/tensorflow,seaotterman/tensorflow,laosiaudi/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,rdipietro/tensorflow,ibab/tensorflow,davidzchen/tensorflow,llhe/tensorflow,moonboots/tensorflow,benoitsteiner/tensorflow-xsmm,karllessard/tensorflow,tornadozou/tensorflow,hsaputra/tensorflow,kamcpp/tensorflow,ishay2b/tensorflow,maciekcc/tensorflow,gojira/tensorflow,TakayukiSakai/tensorflow,panmari/tensorflow,eerwitt/tensorflow,manazhao/tf_recsys,mengxn/tensorflow,theflofly/tensorflow,dyoung418/tensorflow,theflofly/tensorflow,nikste/tensorflow,xzturn/tensorflow,nburn42/tensorflow,Mazecreator/tensorflow,mixturemodel-flow/tensorflow,eerwitt/tensorflow,paolodedios/tensorflow,petewarden/tensorflow,ppwwyyxx/tensorflow,sandeepgupta2k4/tensorflow,kchodorow/tensorflow,karllessard/tensorflow,hfp/tensorflow-xsmm,strint/tensorflow,Kongsea/tensorflow,Bulochkin/tensorflow_pack,ivano666/tensorflow,ageron/tensorflow,MycChiu/tensorflow,chenjun0210/tensorflow,aselle/tensorflow,freedomtan/tensorflow,cxxgtxy/tensorflow,odejesush/tensorflow,AnishShah/tensorflow,asadziach/tensorflow,lakshayg/tensorflow,krikru/tensorflow-opencl,sarvex/tensorflow,AndreasMadsen/tensorflow,anand-c-goog/tensorflow,JingJunYin/tensorflow,freedomtan/tensorflow,chenjun0210/tensorflow,chenjun0210/tensorflow,davidzchen/tensorflow,ivano666/tensorflow,dendisuhubdy/tensorflow,yongtang/tensorflow,ychfan/tensorflow,ibmsoe/tensorflow,juharris/tensorflow,ZhangXinNan/tensorflow,naturali/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,with-git/tensorflow,yaroslavvb/tensorflow,scenarios/tensorflow,alsrgv/tensorflow,dongjoon-hyun/tensorflow,davidzchen/tensorflow,nolanliou/tensorflow,tillahoffmann/tensorflow,aam-at/tensorflow,awni/tensorflow,kchodorow/tensorflow,dhalleine/tensorflow,andrewcmyers/tensorflow,HKUST-SING/tensorflow,jwlawson/tensorflow,sjperkins/tensorflow,AnishShah/tensorflow,ivano666/tensorflow,davidzchen/tensorflow,mavenlin/tensorflow,peterbraden/tensorflow,freedomtan/tensorflow,kamcpp/tensorflow,thesuperzapper/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ghchinoy/tensorflow,xodus7/tensorflow,jostep/tensorflow,eadgarchen/tensorflow,XueqingLin/tensorflow,alivecor/tensorflow,yanchen036/tensorflow,arborh/tensorflow,seaotterman/tensorflow,pcm17/tensorflow,seanli9jan/tensorflow,HKUST-SING/tensorflow,ninotoshi/tensorflow,taknevski/tensorflow-xsmm,abhitopia/tensorflow,tiagofrepereira2012/tensorflow,haeusser/tensorflow,lukeiwanski/tensorflow-opencl,hsaputra/tensorflow,moonboots/tensorflow,horance-liu/tensorflow,manipopopo/tensorflow,tiagofrepereira2012/tensorflow,johndpope/tensorflow,thjashin/tensorflow,manazhao/tf_recsys,scenarios/tensorflow,lukeiwanski/tensorflow,arborh/tensorflow,kevin-coder/tensorflow-fork,jendap/tensorflow,gibiansky/tensorflow,pcm17/tensorflow,4Quant/tensorflow,girving/tensorflow,zasdfgbnm/tensorflow,jwlawson/tensorflow,alisidd/tensorflow,jalexvig/tensorflow,benoitsteiner/tensorflow-xsmm,ibab/tensorflow,xzturn/tensorflow,seaotterman/tensorflow,ychfan/tensorflow,aldian/tensorflow,ppwwyyxx/tensorflow,memo/tensorflow,jhseu/tensorflow,tongwang01/tensorflow,Moriadry/tensorflow,eadgarchen/tensorflow,pierreg/tensorflow,frreiss/tensorflow-fred,mavenlin/tensorflow,sjperkins/tensorflow,hfp/tensorflow-xsmm,ibmsoe/tensorflow,neilhan/tensorflow,mrry/tensorflow,snnn/tensorflow,tomasreimers/tensorflow-emscripten,arborh/tensorflow,alsrgv/tensorflow,HKUST-SING/tensorflow,ageron/tensorflow,JingJunYin/tensorflow,caisq/tensorflow,ville-k/tensorflow,ishay2b/tensorflow,renyi533/tensorflow,Carmezim/tensorflow,code-sauce/tensorflow,ageron/tensorflow,with-git/tensorflow,taknevski/tensorflow-xsmm,kobejean/tensorflow,benoitsteiner/tensorflow-xsmm,allenlavoie/tensorflow,horance-liu/tensorflow,MycChiu/tensorflow,xodus7/tensorflow,ghchinoy/tensorflow,handroissuazo/tensorflow,bowang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,alheinecke/tensorflow-xsmm,ninotoshi/tensorflow,mixturemodel-flow/tensorflow,benoitsteiner/tensorflow-opencl,laszlocsomor/tensorflow,brchiu/tensorflow,mortada/tensorflow,ppwwyyxx/tensorflow,sarvex/tensorflow,apark263/tensorflow,ibab/tensorflow,snnn/tensorflow,Bulochkin/tensorflow_pack,moonboots/tensorflow,whn09/tensorflow,Mazecreator/tensorflow,kevin-coder/tensorflow-fork,dyoung418/tensorflow,seanli9jan/tensorflow,petewarden/tensorflow,AnishShah/tensorflow,tensorflow/tensorflow-pywrap_saved_model,codrut3/tensorflow,wangyum/tensorflow,cg31/tensorflow,tillahoffmann/tensorflow,eaplatanios/tensorflow,ZhangXinNan/tensorflow,mortada/tensorflow,nanditav/15712-TensorFlow,jalexvig/tensorflow,jart/tensorflow,abhitopia/tensorflow,scenarios/tensorflow,adit-chandra/tensorflow,jalexvig/tensorflow,jeffzheng1/tensorflow,cancan101/tensorflow,vrv/tensorflow,taknevski/tensorflow-xsmm,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,HaebinShin/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ArtsiomCh/tensorflow,petewarden/tensorflow,panmari/tensorflow,4Quant/tensorflow,ychfan/tensorflow,mdrumond/tensorflow,calebfoss/tensorflow,suiyuan2009/tensorflow,codrut3/tensorflow,ppries/tensorflow,code-sauce/tensorflow,zasdfgbnm/tensorflow,panmari/tensorflow,4Quant/tensorflow,guschmue/tensorflow,aam-at/tensorflow,alisidd/tensorflow,horance-liu/tensorflow,MostafaGazar/tensorflow,hehongliang/tensorflow,sarvex/tensorflow,girving/tensorflow,jart/tensorflow,cg31/tensorflow,tensorflow/tensorflow,benoitsteiner/tensorflow-opencl,martinwicke/tensorflow,xodus7/tensorflow,rabipanda/tensorflow,tornadozou/tensorflow,ibmsoe/tensorflow,karllessard/tensorflow,asadziach/tensorflow,mdrumond/tensorflow,hsaputra/tensorflow,petewarden/tensorflow,yufengg/tensorflow,adamtiger/tensorflow,ZhangXinNan/tensorflow,aldian/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,nburn42/tensorflow,karllessard/tensorflow,hehongliang/tensorflow,RapidApplicationDevelopment/tensorflow,markslwong/tensorflow,mdrumond/tensorflow,yaroslavvb/tensorflow,codrut3/tensorflow,benoitsteiner/tensorflow,thjashin/tensorflow,jart/tensorflow,guschmue/tensorflow,tntnatbry/tensorflow,caisq/tensorflow,gunan/tensorflow,kevin-coder/tensorflow-fork,vrv/tensorflow,bowang/tensorflow,ran5515/DeepDecision,drpngx/tensorflow,a-doumoulakis/tensorflow,HaebinShin/tensorflow,aselle/tensorflow,freedomtan/tensorflow,tiagofrepereira2012/tensorflow,a-doumoulakis/tensorflow,gojira/tensorflow,Bulochkin/tensorflow_pack,gnieboer/tensorflow,arborh/tensorflow,EvenStrangest/tensorflow,aam-at/tensorflow,meteorcloudy/tensorflow,strint/tensorflow,seanli9jan/tensorflow,nikste/tensorflow,benoitsteiner/tensorflow-opencl,apark263/tensorflow,caisq/tensorflow,paolodedios/tensorflow,Mazecreator/tensorflow,peterbraden/tensorflow,awni/tensorflow,theflofly/tensorflow,nburn42/tensorflow,ghchinoy/tensorflow,rdipietro/tensorflow,petewarden/tensorflow_makefile,sandeepgupta2k4/tensorflow,MoamerEncsConcordiaCa/tensorflow,DCSaunders/tensorflow,chemelnucfin/tensorflow,laszlocsomor/tensorflow,elingg/tensorflow,Intel-tensorflow/tensorflow,ivano666/tensorflow,DCSaunders/tensorflow,kamcpp/tensorflow,AndreasMadsen/tensorflow,Carmezim/tensorflow,thesuperzapper/tensorflow,chemelnucfin/tensorflow,sandeepdsouza93/TensorFlow-15712,snnn/tensorflow,thesuperzapper/tensorflow,memo/tensorflow,dendisuhubdy/tensorflow,lukeiwanski/tensorflow,memo/tensorflow,nightjean/Deep-Learning,4Quant/tensorflow,alisidd/tensorflow,cxxgtxy/tensorflow,mdrumond/tensorflow,manipopopo/tensorflow,renyi533/tensorflow,yanchen036/tensorflow,ppries/tensorflow,wangyum/tensorflow,tillahoffmann/tensorflow,wangyum/tensorflow,petewarden/tensorflow_makefile,chemelnucfin/tensorflow,chemelnucfin/tensorflow,ibmsoe/tensorflow,with-git/tensorflow,gunan/tensorflow,XueqingLin/tensorflow,ZhangXinNan/tensorflow,freedomtan/tensorflow,strint/tensorflow,AnishShah/tensorflow,tongwang01/tensorflow,martinbede/second-sight,mdrumond/tensorflow,xzturn/tensorflow,bowang/tensorflow,laszlocsomor/tensorflow,elingg/tensorflow,dongjoon-hyun/tensorflow,admcrae/tensorflow,benoitsteiner/tensorflow-opencl,dancingdan/tensorflow,sandeepdsouza93/TensorFlow-15712,laosiaudi/tensorflow,Bismarrck/tensorflow,LUTAN/tensorflow,mortada/tensorflow,cancan101/tensorflow,taknevski/tensorflow-xsmm,gibiansky/tensorflow,apark263/tensorflow,dhalleine/tensorflow,tornadozou/tensorflow,johndpope/tensorflow,zasdfgbnm/tensorflow,nightjean/Deep-Learning,sarvex/tensorflow,gautam1858/tensorflow,a-doumoulakis/tensorflow,sandeepgupta2k4/tensorflow,nanditav/15712-TensorFlow,rdipietro/tensorflow,av8ramit/tensorflow,EvenStrangest/tensorflow,gnieboer/tensorflow,tornadozou/tensorflow,eaplatanios/tensorflow,zasdfgbnm/tensorflow,4Quant/tensorflow,tiagofrepereira2012/tensorflow,ghchinoy/tensorflow,wchan/tensorflow,davidzchen/tensorflow,AnishShah/tensorflow,tillahoffmann/tensorflow,EvenStrangest/tensorflow,hsaputra/tensorflow,ZhangXinNan/tensorflow,hfp/tensorflow-xsmm,jeffzheng1/tensorflow,jbedorf/tensorflow,seaotterman/tensorflow,EvenStrangest/tensorflow,pcm17/tensorflow,Intel-Corporation/tensorflow,lukeiwanski/tensorflow,bowang/tensorflow,tillahoffmann/tensorflow,andrewcmyers/tensorflow,ppries/tensorflow,jendap/tensorflow,eaplatanios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,krikru/tensorflow-opencl,girving/tensorflow,sarvex/tensorflow,MoamerEncsConcordiaCa/tensorflow,ravindrapanda/tensorflow,Bulochkin/tensorflow_pack,with-git/tensorflow,code-sauce/tensorflow,drpngx/tensorflow,manipopopo/tensorflow,memo/tensorflow,jart/tensorflow,DavidNorman/tensorflow,jeffzheng1/tensorflow,Kongsea/tensorflow,jbedorf/tensorflow,neilhan/tensorflow,jostep/tensorflow,brchiu/tensorflow,dendisuhubdy/tensorflow,ishay2b/tensorflow,DCSaunders/tensorflow,alistairlow/tensorflow,gibiansky/tensorflow,manjunaths/tensorflow,rabipanda/tensorflow,caisq/tensorflow,raymondxyang/tensorflow,frreiss/tensorflow-fred,martinbede/second-sight,ravindrapanda/tensorflow,bowang/tensorflow,manjunaths/tensorflow,thesuperzapper/tensorflow,ppries/tensorflow,jhaux/tensorflow,calebfoss/tensorflow,abhitopia/tensorflow,tongwang01/tensorflow,ravindrapanda/tensorflow,renyi533/tensorflow,LUTAN/tensorflow,Moriadry/tensorflow,asimshankar/tensorflow,chemelnucfin/tensorflow,DavidNorman/tensorflow,benoitsteiner/tensorflow-xsmm,JingJunYin/tensorflow,frreiss/tensorflow-fred,aldian/tensorflow,mixturemodel-flow/tensorflow,ran5515/DeepDecision,freedomtan/tensorflow,admcrae/tensorflow,yanchen036/tensorflow,chris-chris/tensorflow,Xeralux/tensorflow,rabipanda/tensorflow,jhaux/tensorflow,tomasreimers/tensorflow-emscripten,nanditav/15712-TensorFlow,jendap/tensorflow,zasdfgbnm/tensorflow,MoamerEncsConcordiaCa/tensorflow,jhaux/tensorflow,ageron/tensorflow,apark263/tensorflow,laosiaudi/tensorflow,freedomtan/tensorflow,ppwwyyxx/tensorflow,4Quant/tensorflow,martinbede/second-sight,jbedorf/tensorflow,EvenStrangest/tensorflow,Mazecreator/tensorflow,dhalleine/tensorflow,ma
rkslwong/tensorflow,ravindrapanda/tensorflow,dongjoon-hyun/tensorflow,SnakeJenny/TensorFlow,juharris/tensorflow,adamtiger/tensorflow,TakayukiSakai/tensorflow,yaroslavvb/tensorflow,yongtang/tensorflow,markslwong/tensorflow,rdipietro/tensorflow,seaotterman/tensorflow,MostafaGazar/tensorflow,snnn/tensorflow,suiyuan2009/tensorflow,AnishShah/tensorflow,frreiss/tensorflow-fred,nikste/tensorflow,av8ramit/tensorflow,MostafaGazar/tensorflow,gibiansky/tensorflow,memo/tensorflow,guschmue/tensorflow,pierreg/tensorflow,lukeiwanski/tensorflow-opencl,yongtang/tensorflow,pcm17/tensorflow,tensorflow/tensorflow-pywrap_saved_model,lukeiwanski/tensorflow,alistairlow/tensorflow,sarvex/tensorflow,xzturn/tensorflow,ivano666/tensorflow,horance-liu/tensorflow,thesuperzapper/tensorflow,ran5515/DeepDecision,martinwicke/tensorflow,aam-at/tensorflow,freedomtan/tensorflow,hehongliang/tensorflow,krikru/tensorflow-opencl,bowang/tensorflow,jalexvig/tensorflow,sandeepdsouza93/TensorFlow-15712,peterbraden/tensorflow,JingJunYin/tensorflow,Bismarrck/tensorflow,ppwwyyxx/tensorflow,yanchen036/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,alsrgv/tensorflow,jalexvig/tensorflow,krikru/tensorflow-opencl,hfp/tensorflow-xsmm,petewarden/tensorflow,DavidNorman/tensorflow,odejesush/tensorflow,yanchen036/tensorflow,unsiloai/syntaxnet-ops-hack,ychfan/tensorflow,aam-at/tensorflow,ville-k/tensorflow,alistairlow/tensorflow,eerwitt/tensorflow,lukeiwanski/tensorflow-opencl,anilmuthineni/tensorflow,frreiss/tensorflow-fred,martinbede/second-sight,asimshankar/tensorflow,ArtsiomCh/tensorflow,DavidNorman/tensorflow,ghchinoy/tensorflow,raymondxyang/tensorflow,tomasreimers/tensorflow-emscripten,gautam1858/tensorflow,yongtang/tensorflow,alistairlow/tensorflow,AndreasMadsen/tensorflow,annarev/tensorflow,XueqingLin/tensorflow,jeffzheng1/tensorflow,anilmuthineni/tensorflow,mrry/tensorflow,Moriadry/tensorflow,asimshankar/tensorflow,calebfoss/tensorflow,tntnatbry/tensorflow,manjunaths/tensorflow,ville-k/tensorflow,ibmsoe/tensorflow,yaroslavvb/tensorflow,mrry/tensorflow,mengxn/tensorflow,raymondxyang/tensorflow,JVillella/tensorflow,AnishShah/tensorflow,maciekcc/tensorflow,thesuperzapper/tensorflow,aselle/tensorflow,juharris/tensorflow,theflofly/tensorflow,Bulochkin/tensorflow_pack,jalexvig/tensorflow,av8ramit/tensorflow,pcm17/tensorflow,gautam1858/tensorflow,eadgarchen/tensorflow,moonboots/tensorflow,Bulochkin/tensorflow_pack,alshedivat/tensorflow,asimshankar/tensorflow,abhitopia/tensorflow,eerwitt/tensorflow,kobejean/tensorflow,dancingdan/tensorflow,allenlavoie/tensorflow,drpngx/tensorflow,jhaux/tensorflow,renyi533/tensorflow,johndpope/tensorflow,ppwwyyxx/tensorflow,JingJunYin/tensorflow,MycChiu/tensorflow,ishay2b/tensorflow,brchiu/tensorflow,yaroslavvb/tensorflow,Mistobaan/tensorflow,abhitopia/tensorflow,Xeralux/tensorflow,kobejean/tensorflow,ran5515/DeepDecision,pierreg/tensorflow,sjperkins/tensorflow,gibiansky/tensorflow,EvenStrangest/tensorflow,taknevski/tensorflow-xsmm,LUTAN/tensorflow,alisidd/tensorflow,ppwwyyxx/tensorflow,davidzchen/tensorflow,snnn/tensorflow,dendisuhubdy/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,drpngx/tensorflow,hfp/tensorflow-xsmm,benoitsteiner/tensorflow-opencl,TakayukiSakai/tensorflow,adamtiger/tensorflow,asadziach/tensorflow,jendap/tensorflow,tiagofrepereira2012/tensorflow,ghchinoy/tensorflow,sjperkins/tensorflow,xodus7/tensorflow,lakshayg/tensorflow,jwlawson/tensorflow,eadgarchen/tensorflow,SnakeJenny/TensorFlow,benoitsteiner/tensorflow-opencl,jhseu/tensorflow,lakshayg/tensorflow,benoitsteiner/tensorflow-xsmm
,Mistobaan/tensorflow,ZhangXinNan/tensorflow,lakshayg/tensorflow,calebfoss/tensorflow,nburn42/tensorflow,a-doumoulakis/tensorflow,gautam1858/tensorflow,unsiloai/syntaxnet-ops-hack,renyi533/tensorflow,Mistobaan/tensorflow,rabipanda/tensorflow,mortada/tensorflow,eadgarchen/tensorflow,maciekcc/tensorflow,eaplatanios/tensorflow,adit-chandra/tensorflow,cxxgtxy/tensorflow,mavenlin/tensorflow,SnakeJenny/TensorFlow,gnieboer/tensorflow,ArtsiomCh/tensorflow,gunan/tensorflow,aldian/tensorflow,nolanliou/tensorflow,naturali/tensorflow,nightjean/Deep-Learning,whn09/tensorflow,eerwitt/tensorflow,ibmsoe/tensorflow,andrewcmyers/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,manazhao/tf_recsys,peterbraden/tensorflow,Xeralux/tensorflow,DCSaunders/tensorflow,aldian/tensorflow,laosiaudi/tensorflow,Intel-tensorflow/tensorflow,tomasreimers/tensorflow-emscripten,Kongsea/tensorflow,suiyuan2009/tensorflow,zasdfgbnm/tensorflow,alsrgv/tensorflow,snnn/tensorflow,ppries/tensorflow,zasdfgbnm/tensorflow,Intel-tensorflow/tensorflow,RapidApplicationDevelopment/tensorflow,adit-chandra/tensorflow,XueqingLin/tensorflow,freedomtan/tensorflow,eerwitt/tensorflow,anand-c-goog/tensorflow,AndreasMadsen/tensorflow,Moriadry/tensorflow,JVillella/tensorflow,eaplatanios/tensorflow,naturali/tensorflow,girving/tensorflow,gautam1858/tensorflow,gunan/tensorflow,ibab/tensorflow,HKUST-SING/tensorflow,pavelchristof/gomoku-ai,tensorflow/tensorflow-pywrap_saved_model,alsrgv/tensorflow,gojira/tensorflow,vrv/tensorflow,strint/tensorflow,tensorflow/tensorflow,girving/tensorflow,JingJunYin/tensorflow,asimshankar/tensorflow,girving/tensorflow,cxxgtxy/tensorflow,kevin-coder/tensorflow-fork,renyi533/tensorflow,nanditav/15712-TensorFlow,whn09/tensorflow,DCSaunders/tensorflow,meteorcloudy/tensorflow,Kongsea/tensorflow,wangyum/tensorflow,code-sauce/tensorflow,JingJunYin/tensorflow,nburn42/tensorflow,raymondxyang/tensorflow,tornadozou/tensorflow,alistairlow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,rabipanda/tensorflow,Mistobaan/tensorflow,anilmuthineni/tensorflow,moonboots/tensorflow,rabipanda/tensorflow,llhe/tensorflow,4Quant/tensorflow,scenarios/tensorflow,Moriadry/tensorflow,guschmue/tensorflow,TakayukiSakai/tensorflow,jhseu/tensorflow,DavidNorman/tensorflow,DavidNorman/tensorflow,xzturn/tensorflow,kchodorow/tensorflow,juharris/tensorflow,gautam1858/tensorflow,xzturn/tensorflow,odejesush/tensorflow,benoitsteiner/tensorflow,mengxn/tensorflow,mengxn/tensorflow,asadziach/tensorflow,mdrumond/tensorflow,kevin-coder/tensorflow-fork,renyi533/tensorflow,aselle/tensorflow,jeffzheng1/tensorflow,alsrgv/tensorflow,sarvex/tensorflow,benoitsteiner/tensorflow,scenarios/tensorflow,yongtang/tensorflow,Xeralux/tensorflow,suiyuan2009/tensorflow,LUTAN/tensorflow,seanli9jan/tensorflow,ville-k/tensorflow,manipopopo/tensorflow,eadgarchen/tensorflow,pierreg/tensorflow,nolanliou/tensorflow,vrv/tensorflow,ville-k/tensorflow,tensorflow/tensorflow,brchiu/tensorflow,chemelnucfin/tensorflow,manjunaths/tensorflow,nanditav/15712-TensorFlow,codrut3/tensorflow,mengxn/tensorflow,mixturemodel-flow/tensorflow,benoitsteiner/tensorflow-xsmm,sandeepgupta2k4/tensorflow,juharris/tensorflow,memo/tensorflow,lukeiwanski/tensorflow,mortada/tensorflow,manipopopo/tensorflow,dyoung418/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,code-sauce/tensorflow,asadziach/tensorflow,Bulochkin/tensorflow_pack,unsiloai/syntaxnet-ops-hack,lukeiwanski/tensorflow-opencl,LUTAN/tensorflow,sandeepgupta2k4/tensorflow,handroissuazo/tensorflow,eerwitt/tensorfl
ow,manipopopo/tensorflow,laszlocsomor/tensorflow,asimshankar/tensorflow,haeusser/tensorflow,yongtang/tensorflow,kamcpp/tensorflow,RapidApplicationDevelopment/tensorflow,XueqingLin/tensorflow,ghchinoy/tensorflow,xzturn/tensorflow,unsiloai/syntaxnet-ops-hack,HaebinShin/tensorflow,HKUST-SING/tensorflow,eadgarchen/tensorflow,yongtang/tensorflow,kamcpp/tensorflow,bowang/tensorflow,JVillella/tensorflow,theflofly/tensorflow,lukeiwanski/tensorflow-opencl,zasdfgbnm/tensorflow,benoitsteiner/tensorflow-opencl,anand-c-goog/tensorflow,mavenlin/tensorflow,llhe/tensorflow,MoamerEncsConcordiaCa/tensorflow,manipopopo/tensorflow,SnakeJenny/TensorFlow,dyoung418/tensorflow,AndreasMadsen/tensorflow,martinbede/second-sight,allenlavoie/tensorflow,alivecor/tensorflow,jart/tensorflow,jeffzheng1/tensorflow,odejesush/tensorflow,sjperkins/tensorflow,girving/tensorflow,hfp/tensorflow-xsmm,alistairlow/tensorflow,cxxgtxy/tensorflow,seanli9jan/tensorflow,Xeralux/tensorflow,HaebinShin/tensorflow,lukeiwanski/tensorflow,naturali/tensorflow,cg31/tensorflow,dhalleine/tensorflow,haeusser/tensorflow,Intel-tensorflow/tensorflow,alheinecke/tensorflow-xsmm,chenjun0210/tensorflow,av8ramit/tensorflow,ppries/tensorflow,chenjun0210/tensorflow,xzturn/tensorflow,alheinecke/tensorflow-xsmm,renyi533/tensorflow,dendisuhubdy/tensorflow,TakayukiSakai/tensorflow,apark263/tensorflow,codrut3/tensorflow,gnieboer/tensorflow,zasdfgbnm/tensorflow,moonboots/tensorflow,eadgarchen/tensorflow,xodus7/tensorflow,ghchinoy/tensorflow,seanli9jan/tensorflow,MostafaGazar/tensorflow,MycChiu/tensorflow,nburn42/tensorflow,cxxgtxy/tensorflow,kevin-coder/tensorflow-fork,martinwicke/tensorflow,meteorcloudy/tensorflow,odejesush/tensorflow,alheinecke/tensorflow-xsmm,with-git/tensorflow,seanli9jan/tensorflow,eaplatanios/tensorflow,rabipanda/tensorflow,johndpope/tensorflow,JVillella/tensorflow,ghchinoy/tensorflow,davidzchen/tensorflow,mengxn/tensorflow,ninotoshi/tensorflow,kobejean/tensorflow,andrewcmyers/tensorflow,jhseu/tensorflow,allenlavoie/tensorflow,elingg/tensorflow,johndpope/tensorflow,panmari/tensorflow,meteorcloudy/tensorflow,raymondxyang/tensorflow,paolodedios/tensorflow,pierreg/tensorflow,codrut3/tensorflow,seanli9jan/tensorflow,drpngx/tensorflow,jostep/tensorflow,alshedivat/tensorflow,jendap/tensorflow,pierreg/tensorflow,haeusser/tensorflow,adamtiger/tensorflow,kobejean/tensorflow,yufengg/tensorflow,hsaputra/tensorflow,ppwwyyxx/tensorflow,Intel-Corporation/tensorflow,code-sauce/tensorflow,strint/tensorflow,wchan/tensorflow,arborh/tensorflow,gojira/tensorflow,Mistobaan/tensorflow,alistairlow/tensorflow,Carmezim/tensorflow,yaroslavvb/tensorflow,llhe/tensorflow,Bismarrck/tensorflow,laosiaudi/tensorflow,ville-k/tensorflow,horance-liu/tensorflow,MostafaGazar/tensorflow,petewarden/tensorflow,asadziach/tensorflow,pierreg/tensorflow,dongjoon-hyun/tensorflow,dongjoon-hyun/tensorflow,gunan/tensorflow,cg31/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aselle/tensorflow,alivecor/tensorflow,manjunaths/tensorflow,xodus7/tensorflow,annarev/tensorflow,tongwang01/tensorflow,apark263/tensorflow,mortada/tensorflow,pavelchristof/gomoku-ai,moonboots/tensorflow,anilmuthineni/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow-pywrap_saved_model,HKUST-SING/tensorflow,admcrae/tensorflow,jart/tensorflow,kchodorow/tensorflow,mavenlin/tensorflow,hsaputra/tensorflow,panmari/tensorflow,aselle/tensorflow,karllessard/tensorflow,tomasreimers/tensorflow-emscripten,pierreg/tensorflow,annarev/tensorflow,panmari/tensorflow,sandeepdsouza93/TensorFlow-15712,llhe/te
nsorflow,JVillella/tensorflow,jostep/tensorflow,dancingdan/tensorflow,davidzchen/tensorflow,nikste/tensorflow,manazhao/tf_recsys,anilmuthineni/tensorflow,yongtang/tensorflow
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
Fix futures test
Change: 115766190
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ops module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
<commit_before># Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
<commit_msg>Fix futures test
Change: 115766190<commit_after>
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ops module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
Fix futures test
Change: 115766190# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ops module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
<commit_before># Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
<commit_msg>Fix futures test
Change: 115766190<commit_after># Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ops module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
|
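The record above only prepends a docstring and the three standard future imports to an ops module. As a point of reference (this snippet is not part of the record), here is what "from __future__ import division" changes when such a file still runs under Python 2; "absolute_import" similarly forces package-qualified imports, and "print_function" makes print a function:
# Illustrative only -- not taken from the record above. Shows why the
# future imports matter when a file may still run under Python 2.
from __future__ import division, print_function
print(3 / 2)   # 1.5 with the import; on Python 2 without it, 1
print(3 // 2)  # 1 in both cases: '//' is always floor division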
9846559d9164216924e5f8bb1544148b3e6965b6
|
tensorflow_time_two/python/ops/time_two_ops_test.py
|
tensorflow_time_two/python/ops/time_two_ops_test.py
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
from time_two_ops import time_two
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_time_two.python.ops import time_two_ops
except ImportError:
import time_two_ops
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two_ops.time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
|
Make test work with make and bazel
|
Make test work with make and bazel
|
Python
|
apache-2.0
|
tensorflow/custom-op,tensorflow/custom-op,tensorflow/custom-op
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
from time_two_ops import time_two
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
Make test work with make and bazel
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_time_two.python.ops import time_two_ops
except ImportError:
import time_two_ops
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two_ops.time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
|
<commit_before># Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
from time_two_ops import time_two
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
<commit_msg>Make test work with make and bazel<commit_after>
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_time_two.python.ops import time_two_ops
except ImportError:
import time_two_ops
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two_ops.time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
from time_two_ops import time_two
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
Make test work with make and bazel# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_time_two.python.ops import time_two_ops
except ImportError:
import time_two_ops
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two_ops.time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
|
<commit_before># Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
from time_two_ops import time_two
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
<commit_msg>Make test work with make and bazel<commit_after># Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for time_two ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_time_two.python.ops import time_two_ops
except ImportError:
import time_two_ops
class TimeTwoTest(test.TestCase):
def testTimeTwo(self):
with self.test_session():
self.assertAllClose(
time_two_ops.time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))
if __name__ == '__main__':
test.main()
|
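The fix in the record above makes the test resolve its op module under both build systems: a Bazel run puts the full package path on sys.path, while the Makefile build places the module directly beside the test. A minimal sketch of that dual-import fallback, where "my_project" and "my_ops" are placeholder names standing in for the real ones:
# Hedged sketch of the pattern from the record; the module names
# below are hypothetical, not a real package.
try:
    # Bazel-style invocation: import via the full package path.
    from my_project.python.ops import my_ops
except ImportError:
    # Make-style invocation: the module sits beside the test file.
    import my_ops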
00110b05bba01f087fed37c5bb3ad5fede04d492
|
app/core/user.py
|
app/core/user.py
|
import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyUserAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
|
import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
|
Fix authentication from cookied token.
|
Fix authentication from cookied token.
|
Python
|
apache-2.0
|
c4fcm/MediaMeter-Dashboard,c4fcm/MediaMeter-Skeleton,c4fcm/MediaMeter-Dashboard,c4fcm/MediaMeter-Dashboard,c4fcm/MediaMeter-Skeleton,c4fcm/MediaMeter-Skeleton
|
import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyUserAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
Fix authentication from cookied token.
|
import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
|
<commit_before>import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyUserAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
<commit_msg>Fix authentication from cookied token.<commit_after>
|
import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
|
import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyUserAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
Fix authentication from cookied token.import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
|
<commit_before>import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyUserAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
<commit_msg>Fix authentication from cookied token.<commit_after>import hashlib
import datetime
from flask_login import UserMixin, AnonymousUserMixin
import mediacloud as mcapi
from app.core import db, mc
# User class
class User(UserMixin):
def __init__(self, name, userid, active=True):
self.name = name
self.id = userid
self.active = active
self.created = datetime.datetime.now()
def is_active(self):
return self.active
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@classmethod
def get(cls, userid):
try:
return User.cached[userid]
except KeyError:
return None
User.cached = {}
def authenticate_user_key(username, key):
user_mc = mcapi.MediaCloud(key)
if user_mc.verifyAuthToken():
user = User(username, key)
User.cached[user.id] = user
return user
return AnonymousUserMixin()
def authenticate_user(username, password):
try:
key = mc.userAuthToken(username, password)
user = User(username, key)
User.cached[user.id] = user
return user
except Exception:
return AnonymousUserMixin()
|
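For context, authenticate_user_key in the record above is the piece that lets a stored API key (for example, one kept in a cookie) be re-validated on each visit. A hedged sketch of how it typically plugs into flask_login; the Flask app object here is an assumption for illustration, not part of the record:
# Illustrative wiring only; assumes a Flask app is created here.
from flask import Flask
from flask_login import LoginManager
from app.core.user import User

app = Flask(__name__)
login_manager = LoginManager(app)

@login_manager.user_loader
def load_user(userid):
    # The record caches User objects keyed by API token, so a plain
    # dictionary lookup restores the session user; returning None
    # makes flask_login treat the visitor as anonymous.
    return User.get(userid)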
49d638fc4252f5fb5de079bce77cbf75362d13ad
|
guv/green/builtin.py
|
guv/green/builtin.py
|
"""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
|
"""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
# TODO: should this even be here?
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
|
Add TODO to check later if this should be here
|
Add TODO to check later if this should be here
It doesn't look like it serves any purpose.
|
Python
|
mit
|
veegee/guv,veegee/guv
|
"""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
Add TODO to check later if this should be here
It doesn't look like it serves any purpose.
|
"""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
# TODO: should this even be here?
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
|
<commit_before>"""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
<commit_msg>Add TODO to check later if this should be here
It doesn't look like it serves any purpose.<commit_after>
|
"""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
# TODO: should this even be here?
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
|
"""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
Add TODO to check later if this should be here
It doesn't look like it serves any purpose."""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
# TODO: should this even be here?
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
|
<commit_before>"""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
<commit_msg>Add TODO to check later if this should be here
It doesn't look like it serves any purpose.<commit_after>"""
In order to detect a filehandle that's been closed, our only clue may be the operating system
returning the same filehandle in response to some other operation.
The builtins 'file' and 'open' are patched to collaborate with the notify_opened protocol.
"""
builtins_orig = __builtins__
from .. import hubs
from ..patcher import copy_attributes
__all__ = dir(builtins_orig)
__patched__ = ['open']
copy_attributes(builtins_orig, globals(), ignore=__patched__, srckeys=dir(builtins_orig))
# TODO: should this even be here?
hubs.get_hub()
__original_open = open
__opening = False
def open(*args):
global __opening
result = __original_open(*args)
if not __opening:
# This is incredibly ugly. 'open' is used under the hood by the import process. So, ensure
# we don't wind up in an infinite loop.
__opening = True
hubs.notify_opened(result.fileno())
__opening = False
return result
|
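One robustness detail worth noting in the patched open above: the __opening re-entrancy guard is cleared with a plain assignment, so an exception raised inside hubs.notify_opened would leave the guard stuck at True and silently disable notification for every later open. A hedged try/finally variant, reusing the record's module-level names and otherwise behavior-identical:
def open(*args):
    global __opening
    result = __original_open(*args)
    if not __opening:
        __opening = True
        try:
            # Report the new file descriptor to the hub exactly once,
            # even though the import machinery re-enters open().
            hubs.notify_opened(result.fileno())
        finally:
            # Always release the guard, even if notify_opened fails.
            __opening = False
    return result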
ac46a0fa28901bfd508c75ec71e9e13a02ac40aa
|
ard-mediathek.py
|
ard-mediathek.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
if "documentId" not in args.url:
print("ERROR: The URL does not contain a documentId. Start searching your video from "
"http://mediathek.daserste.de/ to get an URL with documentId.")
return
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
|
Add better error message if URL does not contain documentId
|
Add better error message if URL does not contain documentId
Improves error messages like the one reported in #24
|
Python
|
mit
|
Bouni/ard-mediathek
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
Add better error message if URL does not contain documentId
Improves error messages like the one reported in #24
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
if "documentId" not in args.url:
print("ERROR: The URL does not contain a documentId. Start searching your video from "
"http://mediathek.daserste.de/ to get an URL with documentId.")
return
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
<commit_msg>Add better error message if URL does not contain documentId
Improves error messages like the one reported with #24<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
if "documentId" not in args.url:
print("ERROR: The URL does not contain a documentId. Start searching your video from "
"http://mediathek.daserste.de/ to get an URL with documentId.")
return
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
Add better error message if URL does not contain documentId
Improves error messages like the one reported with #24#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
if "documentId" not in args.url:
print("ERROR: The URL does not contain a documentId. Start searching your video from "
"http://mediathek.daserste.de/ to get an URL with documentId.")
return
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
<commit_msg>Add better error message if URL does not contain documentId
Improves error messages like the one reported with #24<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import contextlib
import sys
from ard_media_downloader import ArdMediathekDownloader
VERSION = "1.1"
def main(argv):
parser = argparse.ArgumentParser(description='Commandline python script tool to download videos from the online ARD mediathek. Version: %s' % VERSION)
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
parser.add_argument('--filename', '-f', type=str, default=None, help='target filename')
parser.add_argument('--quality', '-q', type=int, help='set the desired video quality', default=3, choices=[1,2,3])
parser.add_argument('--subtitles', '-ut', action = "store_true", help='download subtitle in srt format')
parser.add_argument('--derivefilename', '-dft', action="store_true", default=False, help='Get the video title from the video')
args = parser.parse_args()
if "documentId" not in args.url:
print("ERROR: The URL does not contain a documentId. Start searching your video from "
"http://mediathek.daserste.de/ to get an URL with documentId.")
return
amd = ArdMediathekDownloader(args.url)
amd.filename = args.filename
amd.quality = args.quality
amd.derive_filename = args.derivefilename
with contextlib.suppress(KeyboardInterrupt):
amd.download()
if args.subtitles:
amd.get_subtitles()
if __name__ == "__main__":
main(sys.argv)
|
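A side note on the guard added in this record: returning from main() after the print exits the process with status 0, so shell scripts cannot detect the failure. A minimal sketch of an alternative using argparse's built-in error helper, which prints usage plus the message to stderr and exits with status 2 (the sample URL is illustrative):
import argparse

parser = argparse.ArgumentParser(description='documentId guard sketch')
parser.add_argument('url', type=str, help='URL pointing to the mediathek video')
args = parser.parse_args(['http://mediathek.daserste.de/video'])  # illustrative input
if 'documentId' not in args.url:
    # prints usage and the message, then exits with status 2
    parser.error('the URL does not contain a documentId')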
7ee2ea4f3034a6bfc4bcfb78b7c2cc1e3887fb55
|
test/_common.py
|
test/_common.py
|
# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_test_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
|
# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
import sys
def print_test_result(expected, actual, error=None):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
if error:
print(error, file=sys.stderr)
|
Add an argument to print stderr of test results
|
Add an argument to print stderr of test results
|
Python
|
mit
|
thombashi/pytablewriter
|
# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_test_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
Add an argument to print stderr of test results
|
# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
import sys
def print_test_result(expected, actual, error=None):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
if error:
print(error, file=sys.stderr)
|
<commit_before># encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_test_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
<commit_msg>Add an argument to print stderr of test results<commit_after>
|
# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
import sys
def print_test_result(expected, actual, error=None):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
if error:
print(error, file=sys.stderr)
|
# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_test_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
Add an argument to print stderr of test results# encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
import sys
def print_test_result(expected, actual, error=None):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
if error:
print(error, file=sys.stderr)
|
<commit_before># encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
def print_test_result(expected, actual):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
<commit_msg>Add an argument to print stderr of test results<commit_after># encoding: utf-8
'''
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
'''
from __future__ import absolute_import, print_function, unicode_literals
import sys
def print_test_result(expected, actual, error=None):
print("[expected]\n{}\n".format(expected))
print("[actual]\n{}\n".format(actual))
if error:
print(error, file=sys.stderr)
|
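For completeness, a short usage sketch of the updated helper; the body is inlined so the snippet runs standalone (in the repository it would be imported from test._common):
from __future__ import absolute_import, print_function, unicode_literals
import sys

def print_test_result(expected, actual, error=None):
    print("[expected]\n{}\n".format(expected))
    print("[actual]\n{}\n".format(actual))
    if error:
        print(error, file=sys.stderr)

print_test_result("a|b|c", "a,b,c", error="delimiter mismatch")  # error text goes to stderr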
8902c7612d84a2d14e90c4aac6db78ad95dd213c
|
trading_as/__init__.py
|
trading_as/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_company,
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_partner,
res_company,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Change order of imports to fix install into blank db or as dependency of company config.
|
Change order of imports to fix install into blank db or as dependency of
company config.
|
Python
|
agpl-3.0
|
OpusVL/odoo-trading-as
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_company,
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Change order of imports to fix install into blank db or as dependency of
company config.
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_partner,
res_company,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_company,
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Change order of imports to fix install into blank db or as dependency of
company config.<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_partner,
res_company,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_company,
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Change order of imports to fix install into blank db or as dependency of
company config.# -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_partner,
res_company,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_company,
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Change order of imports to fix install into blank db or as dependency of
company config.<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company_brand,
res_partner,
res_company,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
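The fix above is purely an ordering change: on a fresh database the models register in import order, so a module that looks up another model at import time must come after it. A toy sketch of the failure mode using a hypothetical registry (not the actual Odoo machinery):
registry = {}

def load_res_partner():
    registry['res.partner'] = {'fields': ['name']}

def load_res_company():
    # fails if the partner model has not been registered yet
    assert 'res.partner' in registry, 'res.partner must load first'
    registry['res.company'] = {'partner': registry['res.partner']}

load_res_partner()  # dependency first, mirroring the reordered imports
load_res_company()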
5a075e27617f7305dded058b71cb1a3385d3025c
|
docs/conf.py
|
docs/conf.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
|
Change sphinx theme to rtd
|
Change sphinx theme to rtd
|
Python
|
mit
|
rosswhitfield/javelin
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
Change sphinx theme to rtd
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
<commit_msg>Change sphinx theme to rtd<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
Change sphinx theme to rtd#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'nature'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
<commit_msg>Change sphinx theme to rtd<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
extensions = [
'sphinx.ext.autodoc',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2016, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
autodoc_default_flags = ['members', 'undoc-members']
|
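One caveat this diff glosses over: unlike the built-in 'nature' theme, sphinx_rtd_theme is a separate package, so the conf.py only builds where it is installed. Older Sphinx releases also need the theme path registered explicitly, as sketched below (get_html_theme_path is the package's own helper, used the same way in the Spaceland record later in this set):
import sphinx_rtd_theme

html_theme = 'sphinx_rtd_theme'
# Sphinx < 1.6 does not discover installed themes automatically
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]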
bbb5fa95cd4b9d7fac6ac45546eedcd8a5d14162
|
s4v1.py
|
s4v1.py
|
from s3v3 import *
|
from s3v3 import *
import csv
def write_to_file(filename, data_sample):
example = csv.writer(open(filename, 'w', encoding='utf-8'), dialect='excel') # example is the variable of the new file that is open and which we can write to (using utf-8 encoding and an excel dialect).
example.writerows(data_sample) # write rows is going to take the rows in the data sample and write them to the example (i.e. the file name we passed in)
write_to_file("_data/s4-silk_ties.csv", silk_ties) # this is going to create a new csv located in the _data directory, named s4-silk_ties.csv and it is going to contain all of that data from the silk_ties list which we created in s3v2 (silk_ties = filter_col_by_string(data_from_csv, "material", "_silk"))
|
Create write to file function
|
Create write to file function
|
Python
|
mit
|
alexmilesyounger/ds_basics
|
from s3v3 import *
Create write to file function
|
from s3v3 import *
import csv
def write_to_file(filename, data_sample):
example = csv.writer(open(filename, 'w', encoding='utf-8'), dialect='excel') # example is the variable of the new file that is open and which we can write to (using utf-8 encoding and an excel dialect).
example.writerows(data_sample) # write rows is going to take the rows in the data sample and write them to the example (i.e. the file name we passed in)
write_to_file("_data/s4-silk_ties.csv", silk_ties) # this is going to create a new csv located in the _data directory, named s4-silk_ties.csv and it is going to contain all of that data from the silk_ties list which we created in s3v2 (silk_ties = filter_col_by_string(data_from_csv, "material", "_silk"))
|
<commit_before>from s3v3 import *
<commit_msg>Create write to file function<commit_after>
|
from s3v3 import *
import csv
def write_to_file(filename, data_sample):
example = csv.writer(open(filename, 'w', encoding='utf-8'), dialect='excel') # example is the variable of the new file that is open and which we can write to (using utf-8 encoding and an excel dialect).
example.writerows(data_sample) # write rows is going to take the rows in the data sample and write them to the example (i.e. the file name we passed in)
write_to_file("_data/s4-silk_ties.csv", silk_ties) # this is going to create a new csv located in the _data directory, named s4-silk_ties.csv and it is going to contain all of that data from the silk_ties list which we created in s3v2 (silk_ties = filter_col_by_string(data_from_csv, "material", "_silk"))
|
from s3v3 import *
Create write to file functionfrom s3v3 import *
import csv
def write_to_file(filename, data_sample):
example = csv.writer(open(filename, 'w', encoding='utf-8'), dialect='excel') # example is the variable of the new file that is open and which we can write to (using utf-8 encoding and an excel dialect).
example.writerows(data_sample) # write rows is going to take the rows in the data sample and write them to the example (i.e. the file name we passed in)
write_to_file("_data/s4-silk_ties.csv", silk_ties) # this is going to create a new csv located in the _data directory, named s4-silk_ties.csv and it is going to contain all of that data from the silk_ties list which we created in s3v2 (silk_ties = filter_col_by_string(data_from_csv, "material", "_silk"))
|
<commit_before>from s3v3 import *
<commit_msg>Create write to file function<commit_after>from s3v3 import *
import csv
def write_to_file(filename, data_sample):
example = csv.writer(open(filename, 'w', encoding='utf-8'), dialect='excel') # example is the variable of the new file that is open and which we can write to (using utf-8 encoding and an excel dialect).
example.writerows(data_sample) # write rows is going to take the rows in the data sample and write them to the example (i.e. the file name we passed in)
write_to_file("_data/s4-silk_ties.csv", silk_ties) # this is going to create a new csv located in the _data directory, named s4-silk_ties.csv and it is going to contain all of that data from the silk_ties list which we created in s3v2 (silk_ties = filter_col_by_string(data_from_csv, "material", "_silk"))
|
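Note the corrected factory name in this record: the csv module exposes csv.writer, not csv.write. A slightly tidier sketch of the same helper using a context manager so the file handle is closed deterministically (newline='' is the open mode the csv docs recommend on Python 3; the sample rows are illustrative):
import csv

def write_to_file(filename, data_sample):
    with open(filename, 'w', encoding='utf-8', newline='') as handle:
        writer = csv.writer(handle, dialect='excel')
        writer.writerows(data_sample)

write_to_file('/tmp/s4-example.csv', [['brand', 'material'], ['acme', 'silk']])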
978b9450346f4de687ed3c23bc11c970538e948b
|
mosecom_air/api/parser.py
|
mosecom_air/api/parser.py
|
#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', 'name alias')
Measurement = namedtuple('Measurement', 'substance unit performed value')
Result = namedtuple('Result', 'measurements substances units station_alias')
|
#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', ('name', 'alias'))
Measurement = namedtuple('Measurement', ('substance', 'unit', 'performed',
'value'))
Result = namedtuple('Result', ('measurements', 'substances', 'units',
'station_alias'))
|
Use tuple in namedtuple initialization
|
Use tuple in namedtuple initialization
|
Python
|
mit
|
elsid/mosecom-air,elsid/mosecom-air,elsid/mosecom-air
|
#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', 'name alias')
Measurement = namedtuple('Measurement', 'substance unit performed value')
Result = namedtuple('Result', 'measurements substances units station_alias')
Use tuple in namedtuple initialization
|
#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', ('name', 'alias'))
Measurement = namedtuple('Measurement', ('substance', 'unit', 'performed',
'value'))
Result = namedtuple('Result', ('measurements', 'substances', 'units',
'station_alias'))
|
<commit_before>#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', 'name alias')
Measurement = namedtuple('Measurement', 'substance unit performed value')
Result = namedtuple('Result', 'measurements substances units station_alias')
<commit_msg>Use tuple in namedtuple initialization<commit_after>
|
#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', ('name', 'alias'))
Measurement = namedtuple('Measurement', ('substance', 'unit', 'performed',
'value'))
Result = namedtuple('Result', ('measurements', 'substances', 'units',
'station_alias'))
|
#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', 'name alias')
Measurement = namedtuple('Measurement', 'substance unit performed value')
Result = namedtuple('Result', 'measurements substances units station_alias')
Use tuple in namedtuple initialization#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', ('name', 'alias'))
Measurement = namedtuple('Measurement', ('substance', 'unit', 'performed',
'value'))
Result = namedtuple('Result', ('measurements', 'substances', 'units',
'station_alias'))
|
<commit_before>#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', 'name alias')
Measurement = namedtuple('Measurement', 'substance unit performed value')
Result = namedtuple('Result', 'measurements substances units station_alias')
<commit_msg>Use tuple in namedtuple initialization<commit_after>#coding: utf-8
from collections import namedtuple
Substance = namedtuple('Substance', ('name', 'alias'))
Measurement = namedtuple('Measurement', ('substance', 'unit', 'performed',
'value'))
Result = namedtuple('Result', ('measurements', 'substances', 'units',
'station_alias'))
|
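Both spellings are valid: namedtuple accepts field_names either as a single string of space- or comma-separated names or as a sequence of strings, so this change is stylistic. A quick check that the two forms are equivalent:
from collections import namedtuple

FromString = namedtuple('Substance', 'name alias')
FromTuple = namedtuple('Substance', ('name', 'alias'))
assert FromString._fields == FromTuple._fields == ('name', 'alias')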
e8e2ebb156ce75afa87d26a632ed7aa5c74ba2c6
|
eggsclaim.py
|
eggsclaim.py
|
import signal
import sys
import serial
import sms
from xbee import XBee
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-4'] if 'dio-4' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial('/dev/ttyp0', 9600)
xbee = XBee(serial_port, callback=packet_received)
|
import signal
import sys
import serial
import sms
from xbee import XBee
SERIAL_PORT = '/dev/usbserial-143'
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-1'] if 'dio-1' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial(SERIAL_PORT, 9600)
xbee = XBee(serial_port, callback=packet_received)
|
Use correct IO pins and serial port
|
Use correct IO pins and serial port
|
Python
|
mit
|
jamespettigrew/eggsclaim
|
import signal
import sys
import serial
import sms
from xbee import XBee
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-4'] if 'dio-4' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial('/dev/ttyp0', 9600)
xbee = XBee(serial_port, callback=packet_received)Use correct IO pins and serial port
|
import signal
import sys
import serial
import sms
from xbee import XBee
SERIAL_PORT = '/dev/usbserial-143'
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-1'] if 'dio-1' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial(SERIAL_PORT, 9600)
xbee = XBee(serial_port, callback=packet_received)
|
<commit_before>import signal
import sys
import serial
import sms
from xbee import XBee
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-4'] if 'dio-4' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial('/dev/ttyp0', 9600)
xbee = XBee(serial_port, callback=packet_received)<commit_msg>Use correct IO pins and serial port<commit_after>
|
import signal
import sys
import serial
import sms
from xbee import XBee
SERIAL_PORT = '/dev/usbserial-143'
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-1'] if 'dio-1' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial(SERIAL_PORT, 9600)
xbee = XBee(serial_port, callback=packet_received)
|
import signal
import sys
import serial
import sms
from xbee import XBee
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-4'] if 'dio-4' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial('/dev/ttyp0', 9600)
xbee = XBee(serial_port, callback=packet_received)Use correct IO pins and serial portimport signal
import sys
import serial
import sms
from xbee import XBee
SERIAL_PORT = '/dev/usbserial-143'
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-1'] if 'dio-1' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial(SERIAL_PORT, 9600)
xbee = XBee(serial_port, callback=packet_received)
|
<commit_before>import signal
import sys
import serial
import sms
from xbee import XBee
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-4'] if 'dio-4' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial('/dev/ttyp0', 9600)
xbee = XBee(serial_port, callback=packet_received)<commit_msg>Use correct IO pins and serial port<commit_after>import signal
import sys
import serial
import sms
from xbee import XBee
SERIAL_PORT = '/dev/usbserial-143'
MOBILE_NUM = '0400000000'
NOTIFICATION_MSG = 'Cock-a-doodle-doo! An egg is waiting for you!'
egg_was_present = False
def signal_handler(signal, frame):
xbee.halt()
serial_port.close()
sys.exit(0)
def packet_received(packet):
samples = packet['samples'][0]
egg_is_present = samples['dio-1'] if 'dio-1' in samples else False
if egg_is_present and egg_is_present != egg_was_present:
sms.send(MOBILE_NUM, NOTIFICATION_MSG)
egg_was_present = egg_is_present
signal.signal(signal.SIGINT, signal_handler)
serial_port = serial.Serial(SERIAL_PORT, 9600)
xbee = XBee(serial_port, callback=packet_received)
|
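One latent bug survives this commit: packet_received assigns to egg_was_present, so Python treats the name as local and the comparison raises UnboundLocalError on the first packet. A minimal sketch of the fix, with the hardware and SMS calls stubbed out:
egg_was_present = False

def packet_received(packet):
    global egg_was_present  # without this, the assignment below shadows the module flag
    samples = packet['samples'][0]
    egg_is_present = samples.get('dio-1', False)
    if egg_is_present and egg_is_present != egg_was_present:
        print('would send SMS notification')  # stand-in for sms.send(...)
    egg_was_present = egg_is_present

packet_received({'samples': [{'dio-1': True}]})  # notifies once; repeat packets stay quiet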
d2686372462f7af3949c08e1904d678107e68bdf
|
docs/conf.py
|
docs/conf.py
|
import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = (
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
)
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
|
import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
]
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
|
Use list for Sphinx extensions, not tuple
|
Use list for Sphinx extensions, not tuple
Read the Docs is trying to append to the tuple during the docs build.
|
Python
|
mit
|
JaGallup/spaceland
|
import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = (
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
)
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
Use list for Sphinx extensions, not tuple
Read the Docs is trying to append to the tuple during the docs build.
|
import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
]
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
|
<commit_before>import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = (
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
)
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
<commit_msg>Use list for Sphinx extensions, not tuple
Read the Docs is trying to append to the tuple during the docs build.<commit_after>
|
import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
]
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
|
import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = (
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
)
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
Use list for Sphinx extensions, not tuple
Read the Docs is trying to append to the tuple during the docs build.import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
]
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
|
<commit_before>import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = (
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
)
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
<commit_msg>Use list for Sphinx extensions, not tuple
Read the Docs is trying to append to the tuple during the docs build.<commit_after>import pkg_resources
import sphinx_rtd_theme
project = 'Spaceland'
copyright = '2017 Já hf'
version = release = pkg_resources.require('spaceland')[0].version
master_doc = 'index'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
default_role = 'py:obj'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx_autodoc_typehints',
]
intersphinx_mapping = {'https://docs.python.org/3': None}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'navigation_depth': 4,
}
html_last_updated_fmt = '%d %B %Y'
html_use_index = False
html_domain_indices = False
html_copy_source = False
html_show_sphinx = False
|
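The message body gives the real motivation: the Read the Docs build injects its own extension by calling extensions.append(...) on the conf.py value, and tuples have no append method. A two-line demonstration of the failure (the injected extension name is illustrative):
extensions = ('sphinx.ext.autodoc',)
try:
    extensions.append('readthedocs_ext.readthedocs')  # what the RTD build attempts
except AttributeError as exc:
    print(exc)  # 'tuple' object has no attribute 'append'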
e45f82ad8f11385ba7776beeb7fee497b9f4761a
|
observatory/dashboard/templatetags/javascript.py
|
observatory/dashboard/templatetags/javascript.py
|
# Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
LIGHTBOX = '''
$(document).ready(function() {
$("a[rel=^lightbox]").click(function() {
alert("asdf!")
});
});
'''
# returns the js required to create a lightbox
def lightbox(parser, token):
class LightboxNode(template.Node):
def render(self, context):
return LIGHTBOX
return LightboxNode()
register.tag('jquery', jquery)
register.tag('lightbox', lightbox)
|
# Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
register.tag('jquery', jquery)
|
Remove old lightbox starting stuff that never got done
|
Remove old lightbox starting stuff that never got done
|
Python
|
isc
|
natestedman/Observatory,rcos/Observatory,rcos/Observatory,natestedman/Observatory,rcos/Observatory,natestedman/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory
|
# Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
LIGHTBOX = '''
$(document).ready(function() {
$("a[rel=^lightbox]").click(function() {
alert("asdf!")
});
});
'''
# returns the js required to create a lightbox
def lightbox(parser, token):
class LightboxNode(template.Node):
def render(self, context):
return LIGHTBOX
return LightboxNode()
register.tag('jquery', jquery)
register.tag('lightbox', lightbox)
Remove old lightbox starting stuff that never got done
|
# Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
register.tag('jquery', jquery)
|
<commit_before># Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
LIGHTBOX = '''
$(document).ready(function() {
$("a[rel=^lightbox]").click(function() {
alert("asdf!")
});
});
'''
# returns the js required to create a lightbox
def lightbox(parser, token):
class LightboxNode(template.Node):
def render(self, context):
return LIGHTBOX
return LightboxNode()
register.tag('jquery', jquery)
register.tag('lightbox', lightbox)
<commit_msg>Remove old lightbox starting stuff that never got done<commit_after>
|
# Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
register.tag('jquery', jquery)
|
# Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
LIGHTBOX = '''
$(document).ready(function() {
$("a[rel=^lightbox]").click(function() {
alert("asdf!")
});
});
'''
# returns the js required to create a lightbox
def lightbox(parser, token):
class LightboxNode(template.Node):
def render(self, context):
return LIGHTBOX
return LightboxNode()
register.tag('jquery', jquery)
register.tag('lightbox', lightbox)
Remove old lightbox starting stuff that never got done# Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
register.tag('jquery', jquery)
|
<commit_before># Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
LIGHTBOX = '''
$(document).ready(function() {
$("a[rel=^lightbox]").click(function() {
alert("asdf!")
});
});
'''
# returns the js required to create a lightbox
def lightbox(parser, token):
class LightboxNode(template.Node):
def render(self, context):
return LIGHTBOX
return LightboxNode()
register.tag('jquery', jquery)
register.tag('lightbox', lightbox)
<commit_msg>Remove old lightbox starting stuff that never got done<commit_after># Copyright (c) 2010, Nate Stedman <natesm@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django import template
from settings import JQUERY
import os
register = template.Library()
# returns the url to the jquery version being used
def jquery(parser, token):
class JqueryNode(template.Node):
def render(self, context):
return JQUERY
return JqueryNode()
register.tag('jquery', jquery)
|
1ca76f55adaa4ffe305f732df47f1a070449d549
|
rpmvenv/extensions/loader.py
|
rpmvenv/extensions/loader.py
|
"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = tuple(set(('core',) + tuple(whitelist)))
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
|
"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
|
Fix indeterminate ordering issue for extensions
|
Fix indeterminate ordering issue for extensions
The original code used set() to dedupe enabled extensions. This resulted
in an arbitrary ordering of the values. The expected result was a
deterministic ordering of loaded extensions that matches the order given
by the whitelist. This removes the set() usage to preserve order.
Existing users subject to the arbitrary ordering should be unaffected as
their builds must already be tolerant to ordering changes to have worked
thus far.
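For illustration only (not part of the commit): if duplicates ever need to be removed again, a hedged sketch of doing so while preserving whitelist order looks like this.
def dedupe_keep_order(names):
    # First occurrence wins; later repeats are dropped, order preserved.
    seen = set()
    ordered = []
    for name in names:
        if name not in seen:
            seen.add(name)
            ordered.append(name)
    return ordered
dedupe_keep_order(('core', 'blocks', 'core'))  # -> ['core', 'blocks']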
|
Python
|
mit
|
kevinconway/rpmvenv
|
"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = tuple(set(('core',) + tuple(whitelist)))
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
Fix indeterminate ordering issue for extensions
The original code used set() to dedupe enabled extensions. This resulted
in an arbitrary ordering of the values. The expected result was a
deterministic ordering of loaded extensions that matches the order given
by the whitelist. This removes the set() usage to preserve order.
Existing users subject to the arbitrary ordering should be unaffected as
their builds must already be tolerant to ordering changes to have worked
thus far.
|
"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
|
<commit_before>"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = tuple(set(('core',) + tuple(whitelist)))
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
<commit_msg>Fix indeterminate ordering issue for extensions
The original code used set() to dedupe enabled extensions. This resulted
in an arbitrary ordering of the values. The expected result was a
deterministic ordering of loaded extensions that matches the order given
by the whitelist. This removes the set() usage to preserve order.
Existing users subject to the arbitrary ordering should be unaffected as
their builds must already be tolerant to ordering changes to have worked
thus far.<commit_after>
|
"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
|
"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = tuple(set(('core',) + tuple(whitelist)))
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
Fix indeterminate ordering issue for extensions
The original code used set() to dedupe enabled extensions. This resulted
in an arbitrary ordering of the values. The expected result was a
deterministic ordering of loaded extensions that matches the order given
by the whitelist. This removes the set() usage to preserve order.
Existing users subject to the arbitrary ordering should be unaffected as
their builds must already be tolerant to ordering changes to have worked
thus far."""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
|
<commit_before>"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = tuple(set(('core',) + tuple(whitelist)))
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
<commit_msg>Fix indeterminate ordering issue for extensions
The original code used set() to dedupe enabled extensions. This resulted
in an arbitrary ordering of the values. The expected result was a
deterministic ordering of loaded extensions that matches the order given
by the whitelist. This removes the set() usage to preserve order.
Existing users subject to the arbitrary ordering should be unaffected as
their builds must already be tolerant to ordering changes to have worked
thus far.<commit_after>"""Tools for loading and validating extensions."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import pkg_resources
import semver
class MissingDependency(Exception):
"""No dependency found."""
class InvalidDependency(Exception):
"""Found dependency but with the wrong version."""
def load_extensions(whitelist=()):
"""Get an iterable of extensions in order."""
whitelist = ('core',) + tuple(whitelist)
extensions = pkg_resources.iter_entry_points('rpmvenv.extensions')
extensions = (
extension for extension in extensions if extension.name in whitelist
)
extensions = tuple(set(extensions))
extensions = sorted(extensions, key=lambda ext: whitelist.index(ext.name))
return tuple(extension.load() for extension in extensions)
def validate_extensions(extensions):
"""Process the extension dependencies."""
ext_map = dict(
(ext.name, ext) for ext in extensions
)
for ext in extensions:
for dependency, versions in ext.requirements.items():
ext_dependency = ext_map.get(dependency, None)
if not ext_dependency:
raise MissingDependency(
'{0} is required by {1} but is not loaded.'.format(
                        dependency,
                        ext.name,
)
)
for version in versions:
if not semver.match(ext.version, version):
raise InvalidDependency(
'{0}-{1} required by {2} but found {0}-{3}.'.format(
dependency,
version,
ext.name,
ext.version,
)
)
return extensions
|
2917c8d380bfee3c7589f806ea12f2e3f83e8b93
|
npc/character/__init__.py
|
npc/character/__init__.py
|
from .character import *
|
"""
Module for all character objects.
"""
from .character import Character
from .changeling import Changeling
from .werewolf import Werewolf
def build(attributes: dict = None, other_char: Character = None):
"""
Build a new character object with the appropriate class
This derives the correct character class based on the type tag of either the
other_char character object or the attributes dict, then creates a new
character object using that class. If neither is supplied, a blank Character
is returned.
The character type is fetched first from other_char and only if that is not
present is it fetched from attributes.
    Both other_char and attributes are passed to the character constructor. See
    the constructor for how their precedence is applied.
    If you need more control over the instantiation process, use
    character_klass_from_type and instantiate the class manually.
Args:
attributes (dict): Dictionary of attributes to insert into the
Character.
other_char (Character): Existing character object to copy.
Returns:
Instantiated Character class or subclass matching the given type.
"""
if other_char:
klass = character_klass_from_type(other_char.type_key)
elif attributes:
klass = character_klass_from_type(attributes['type'][0])
else:
klass = Character
return klass(other_char = other_char, attributes = attributes)
def character_klass_from_type(ctype: str):
"""
Choose the correct character class based on type tag
Args:
ctype (str): Character type tag to use
Returns:
Character class or subclass depending on the type
"""
if ctype:
ctype = ctype.lower()
if ctype == 'changeling':
return Changeling
if ctype == 'werewolf':
return Werewolf
return Character
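A hedged usage sketch of the new factory (hypothetical values; assumes the Character constructors accept these keywords and that parsed attributes are list-valued, as the attributes['type'][0] lookup implies):
# Illustration only -- the attribute names and values are made up.
nick = build(attributes={'type': ['werewolf'], 'name': ['Nick']})
assert isinstance(nick, Werewolf)
blank = build()  # no type hint anywhere -> plain Character
assert isinstance(blank, Character)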
|
Add helpers to find the right character class
|
Add helpers to find the right character class
|
Python
|
mit
|
aurule/npc,aurule/npc
|
from .character import *
Add helpers to find the right character class
|
"""
Module for all character objects.
"""
from .character import Character
from .changeling import Changeling
from .werewolf import Werewolf
def build(attributes: dict = None, other_char: Character = None):
"""
Build a new character object with the appropriate class
This derives the correct character class based on the type tag of either the
other_char character object or the attributes dict, then creates a new
character object using that class. If neither is supplied, a blank Character
is returned.
The character type is fetched first from other_char and only if that is not
present is it fetched from attributes.
    Both other_char and attributes are passed to the character constructor. See
    the constructor for how their precedence is applied.
    If you need more control over the instantiation process, use
    character_klass_from_type and instantiate the class manually.
Args:
attributes (dict): Dictionary of attributes to insert into the
Character.
other_char (Character): Existing character object to copy.
Returns:
Instantiated Character class or subclass matching the given type.
"""
if other_char:
klass = character_klass_from_type(other_char.type_key)
elif attributes:
klass = character_klass_from_type(attributes['type'][0])
else:
klass = Character
return klass(other_char = other_char, attributes = attributes)
def character_klass_from_type(ctype: str):
"""
Choose the correct character class based on type tag
Args:
ctype (str): Character type tag to use
Returns:
Character class or subclass depending on the type
"""
if ctype:
ctype = ctype.lower()
if ctype == 'changeling':
return Changeling
if ctype == 'werewolf':
return Werewolf
return Character
|
<commit_before>from .character import *
<commit_msg>Add helpers to find the right character class<commit_after>
|
"""
Module for all character objects.
"""
from .character import Character
from .changeling import Changeling
from .werewolf import Werewolf
def build(attributes: dict = None, other_char: Character = None):
"""
Build a new character object with the appropriate class
This derives the correct character class based on the type tag of either the
other_char character object or the attributes dict, then creates a new
character object using that class. If neither is supplied, a blank Character
is returned.
The character type is fetched first from other_char and only if that is not
present is it fetched from attributes.
    Both other_char and attributes are passed to the character constructor. See
    the constructor for how their precedence is applied.
    If you need more control over the instantiation process, use
    character_klass_from_type and instantiate the class manually.
Args:
attributes (dict): Dictionary of attributes to insert into the
Character.
other_char (Character): Existing character object to copy.
Returns:
Instantiated Character class or subclass matching the given type.
"""
if other_char:
klass = character_klass_from_type(other_char.type_key)
elif attributes:
klass = character_klass_from_type(attributes['type'][0])
else:
klass = Character
return klass(other_char = other_char, attributes = attributes)
def character_klass_from_type(ctype: str):
"""
Choose the correct character class based on type tag
Args:
ctype (str): Character type tag to use
Returns:
Character class or subclass depending on the type
"""
if ctype:
ctype = ctype.lower()
if ctype == 'changeling':
return Changeling
if ctype == 'werewolf':
return Werewolf
return Character
|
from .character import *
Add helpers to find the right character class"""
Module for all character objects.
"""
from .character import Character
from .changeling import Changeling
from .werewolf import Werewolf
def build(attributes: dict = None, other_char: Character = None):
"""
Build a new character object with the appropriate class
This derives the correct character class based on the type tag of either the
other_char character object or the attributes dict, then creates a new
character object using that class. If neither is supplied, a blank Character
is returned.
The character type is fetched first from other_char and only if that is not
present is it fetched from attributes.
    Both other_char and attributes are passed to the character constructor. See
    the constructor for how their precedence is applied.
    If you need more control over the instantiation process, use
    character_klass_from_type and instantiate the class manually.
Args:
attributes (dict): Dictionary of attributes to insert into the
Character.
other_char (Character): Existing character object to copy.
Returns:
Instantiated Character class or subclass matching the given type.
"""
if other_char:
klass = character_klass_from_type(other_char.type_key)
elif attributes:
klass = character_klass_from_type(attributes['type'][0])
else:
klass = Character
return klass(other_char = other_char, attributes = attributes)
def character_klass_from_type(ctype: str):
"""
Choose the correct character class based on type tag
Args:
ctype (str): Character type tag to use
Returns:
Character class or subclass depending on the type
"""
if ctype:
ctype = ctype.lower()
if ctype == 'changeling':
return Changeling
if ctype == 'werewolf':
return Werewolf
return Character
|
<commit_before>from .character import *
<commit_msg>Add helpers to find the right character class<commit_after>"""
Module for all character objects.
"""
from .character import Character
from .changeling import Changeling
from .werewolf import Werewolf
def build(attributes: dict = None, other_char: Character = None):
"""
Build a new character object with the appropriate class
This derives the correct character class based on the type tag of either the
other_char character object or the attributes dict, then creates a new
character object using that class. If neither is supplied, a blank Character
is returned.
The character type is fetched first from other_char and only if that is not
present is it fetched from attributes.
    Both other_char and attributes are passed to the character constructor. See
    the constructor for how their precedence is applied.
    If you need more control over the instantiation process, use
    character_klass_from_type and instantiate the class manually.
Args:
attributes (dict): Dictionary of attributes to insert into the
Character.
other_char (Character): Existing character object to copy.
Returns:
Instantiated Character class or subclass matching the given type.
"""
if other_char:
klass = character_klass_from_type(other_char.type_key)
elif attributes:
klass = character_klass_from_type(attributes['type'][0])
else:
klass = Character
return klass(other_char = other_char, attributes = attributes)
def character_klass_from_type(ctype: str):
"""
Choose the correct character class based on type tag
Args:
ctype (str): Character type tag to use
Returns:
Character class or subclass depending on the type
"""
if ctype:
ctype = ctype.lower()
if ctype == 'changeling':
return Changeling
if ctype == 'werewolf':
return Werewolf
return Character
|
3dd8a56af19301fdedd1fa95c25743ea1e9d7a1c
|
tkp/__init__.py
|
tkp/__init__.py
|
"""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc"
|
"""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc2"
|
Test of Travis->autodeploy to PyPI for 3.0 RC2.
|
Test of Travis->autodeploy to PyPI for 3.0 RC2.
|
Python
|
bsd-2-clause
|
transientskp/tkp,transientskp/tkp
|
"""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc"
Test of Travis->autodeploy to PyPI for 3.0 RC2.
|
"""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc2"
|
<commit_before>"""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc"
<commit_msg>Test of Travis->autodeploy to PyPI for 3.0 RC2.<commit_after>
|
"""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc2"
|
"""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc"
Test of Travis->autodeploy to PyPI for 3.0 RC2."""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc2"
|
<commit_before>"""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc"
<commit_msg>Test of Travis->autodeploy to PyPI for 3.0 RC2.<commit_after>"""
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "3.0rc2"
|
6d5ce6164c4406be66b787c84de64f6919a6246d
|
changes/jobs/sync_build.py
|
changes/jobs/sync_build.py
|
from flask import current_app
from changes.config import queue
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_uri=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
|
from flask import current_app
from changes.config import queue, db
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_url=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
db.session.commit()
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
|
Correct base_url usage, and force commit
|
Correct base_url usage, and force commit
|
Python
|
apache-2.0
|
dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes
|
from flask import current_app
from changes.config import queue
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_uri=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
Correct base_url usage, and force commit
|
from flask import current_app
from changes.config import queue, db
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_url=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
db.session.commit()
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
|
<commit_before>from flask import current_app
from changes.config import queue
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_uri=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
<commit_msg>Correct base_url usage, and force commit<commit_after>
|
from flask import current_app
from changes.config import queue, db
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_url=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
db.session.commit()
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
|
from flask import current_app
from changes.config import queue
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_uri=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
Correct base_url usage, and force commitfrom flask import current_app
from changes.config import queue, db
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_url=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
db.session.commit()
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
|
<commit_before>from flask import current_app
from changes.config import queue
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_uri=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
<commit_msg>Correct base_url usage, and force commit<commit_after>from flask import current_app
from changes.config import queue, db
from changes.backends.jenkins.builder import JenkinsBuilder
from changes.constants import Status
from changes.models.build import Build
@queue.job
def sync_build(build_id):
try:
build = Build.query.get(build_id)
if build.status == Status.finished:
return
builder = JenkinsBuilder(
app=current_app,
base_url=current_app.config['JENKINS_URL'],
)
builder.sync_build(build)
db.session.commit()
if build.status != Status.finished:
sync_build.delay(
build_id=build.id,
)
except Exception:
# Ensure we continue to synchronize this build as this could be a
# temporary failure
sync_build.delay(
build_id=build.id,
)
raise
|
be9daefbdd80380a7fdb8369bf32208ef61a6615
|
spacy/tests/test_download.py
|
spacy/tests/test_download.py
|
# coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.slow
@pytest.mark.parametrize('model', ['en_core_web_md-1.2.0'])
def test_download_direct_download(model):
download(model, direct=True)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
|
# coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
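Context on the deletion: the removed test carried pytest's slow marker, which only keeps such cases out of a run when deselected explicitly. A minimal hedged sketch of the pattern for any future network-bound test:
@pytest.mark.slow  # deselect in CI with: pytest -m "not slow"
def test_hypothetical_network_case():
    ...  # anything that actually touches the network belongs here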
|
Remove actual model downloading from tests
|
Remove actual model downloading from tests
|
Python
|
mit
|
oroszgy/spaCy.hu,raphael0202/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,explosion/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,spacy-io/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,explosion/spaCy,explosion/spaCy,oroszgy/spaCy.hu,recognai/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,explosion/spaCy,Gregory-Howard/spaCy,honnibal/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,recognai/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,explosion/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,raphael0202/spaCy,aikramer2/spaCy,raphael0202/spaCy,raphael0202/spaCy,honnibal/spaCy
|
# coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.slow
@pytest.mark.parametrize('model', ['en_core_web_md-1.2.0'])
def test_download_direct_download(model):
download(model, direct=True)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
Remove actual model downloading from tests
|
# coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.slow
@pytest.mark.parametrize('model', ['en_core_web_md-1.2.0'])
def test_download_direct_download(model):
download(model, direct=True)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
<commit_msg>Remove actual model downloading from tests<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
|
# coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.slow
@pytest.mark.parametrize('model', ['en_core_web_md-1.2.0'])
def test_download_direct_download(model):
download(model, direct=True)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
Remove actual model downloading from tests# coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.slow
@pytest.mark.parametrize('model', ['en_core_web_md-1.2.0'])
def test_download_direct_download(model):
download(model, direct=True)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
<commit_msg>Remove actual model downloading from tests<commit_after># coding: utf-8
from __future__ import unicode_literals
from ..cli.download import download, get_compatibility, get_version, check_error_depr
import pytest
def test_download_fetch_compatibility():
compatibility = get_compatibility()
assert type(compatibility) == dict
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_succeeds(model):
comp = { model: ['1.7.0', '0.100.0'] }
assert get_version(model, comp)
@pytest.mark.parametrize('model', ['en_core_web_md'])
def test_download_get_matching_version_fails(model):
diff_model = 'test_' + model
comp = { diff_model: ['1.7.0', '0.100.0'] }
with pytest.raises(SystemExit):
assert get_version(model, comp)
@pytest.mark.parametrize('model', [False, None, '', 'all'])
def test_download_no_model_depr_error(model):
with pytest.raises(SystemExit):
check_error_depr(model)
|
08f4c3a2360f6c5fe0048f6d1a6f0eab536f8aa9
|
joku/cogs/_common.py
|
joku/cogs/_common.py
|
from collections import OrderedDict
import threading
import aiohttp
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
from collections import OrderedDict
import threading
import aiohttp
import random
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
# An RNG that can be used by each cog.
self.rng = random.SystemRandom()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
Add an RNG to each cog.
|
Add an RNG to each cog.
|
Python
|
mit
|
MJB47/Jokusoramame,MJB47/Jokusoramame,MJB47/Jokusoramame
|
from collections import OrderedDict
import threading
import aiohttp
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
Add an RNG to each cog.
|
from collections import OrderedDict
import threading
import aiohttp
import random
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
# An RNG that can be used by each cog.
self.rng = random.SystemRandom()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
<commit_before>from collections import OrderedDict
import threading
import aiohttp
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
<commit_msg>Add an RNG to each cog.<commit_after>
|
from collections import OrderedDict
import threading
import aiohttp
import random
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
# An RNG that can be used by each cog.
self.rng = random.SystemRandom()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
from collections import OrderedDict
import threading
import aiohttp
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
Add an RNG to each cog.from collections import OrderedDict
import threading
import aiohttp
import random
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
# An RNG that can be used by each cog.
self.rng = random.SystemRandom()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
<commit_before>from collections import OrderedDict
import threading
import aiohttp
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
<commit_msg>Add an RNG to each cog.<commit_after>from collections import OrderedDict
import threading
import aiohttp
import random
from joku.bot import Jokusoramame
class _CogMeta(type):
def __prepare__(*args, **kwargs):
# Use an OrderedDict for the class body.
return OrderedDict()
class Cog(metaclass=_CogMeta):
def __init__(self, bot: Jokusoramame):
self._bot = bot
self.logger = self.bot.logger
# A cog-local session that can be used.
self.session = aiohttp.ClientSession()
# An RNG that can be used by each cog.
self.rng = random.SystemRandom()
def __unload(self):
self.session.close()
@property
def bot(self) -> 'Jokusoramame':
"""
:return: The bot instance associated with this cog.
"""
return self._bot
@classmethod
def setup(cls, bot: Jokusoramame):
bot.add_cog(cls(bot))
|
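For context on the choice of random.SystemRandom over the module-level generator, a small standalone demonstration:
import random
# SystemRandom draws from os.urandom: it has the same API as random.Random
# but is not seedable/reproducible, which suits user-facing draws in a bot.
rng = random.SystemRandom()
print(rng.randint(1, 6))
print(rng.choice(['heads', 'tails']))
rng.seed(42)  # accepted but deliberately a no-op for SystemRandom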
96fe288cbd4c4399c83b4c3d56da6e427aaad0f9
|
spicedham/digitdestroyer.py
|
spicedham/digitdestroyer.py
|
from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(BaseWrapper):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return 0.5
|
from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(object):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return None
|
Fix inheritance error and return value
|
Fix inheritance error and return value
It shouldn't inherit from BaseWrapper, but merely from object.
It should return None instead of 0.5 so it will have no effect on the average.
|
Python
|
mpl-2.0
|
mozilla/spicedham,mozilla/spicedham
|
from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(BaseWrapper):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return 0.5
Fix inheritance error and return value
It shouldn't inherit from BaseWrapper, but merely from object.
It should return None instead of 0.5 so it will have no effect on the average.
|
from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(object):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return None
|
<commit_before>from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(BaseWrapper):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return 0.5
<commit_msg>Fix inheritance error and return value
It shouldn't inherit from BaseWrapper, but merely from object.
It should return None instead of 0.5 so it will have no effect on the average.<commit_after>
|
from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(object):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return None
|
from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(BaseWrapper):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return 0.5
Fix inheritance error and return value
It shouldn't inherit from BaseWrapper, but merely from object.
It should return None instead of 0.5 so it will have no effect on the average.from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(object):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return None
|
<commit_before>from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(BaseWrapper):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return 0.5
<commit_msg>Fix inheritance error and return value
It shouldn't inherit from BaseWrapper, but merely from object.
It should return None instead of 0.5 so it will have no effect on the average.<commit_after>from spicedham.basewrapper import BaseWrapper
class DigitDestroyer(object):
def train(*args):
pass
def classify(self, response):
if all(map(unicode.isdigit, response)):
return 1
else:
return None
|
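A sketch of why returning None beats returning 0.5 when classifier results are averaged (the combine helper is a guess at the aggregation, not spicedham's actual code):
def combine(scores):
    # Ignore plugins that abstain with None instead of letting a neutral
    # 0.5 drag every average toward the middle.
    votes = [s for s in scores if s is not None]
    return sum(votes) / len(votes) if votes else None
assert combine([1.0, None, 0.8]) == 0.9  # the abstaining plugin is ignored
assert combine([1.0, 0.5, 0.8]) != 0.9   # a hard-coded 0.5 skews the result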
cc0c43c3131161902de3a8a68688766cacd637b9
|
lowercasing_test/src/tests/lowercasing/fetchletters.py
|
lowercasing_test/src/tests/lowercasing/fetchletters.py
|
#! /usr/bin/env python
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = filter(lambda x: x[2] == 'Lu' or x[2] == 'Ll', raw)
image = [unichr(int(c[0], 16)) for c in characters]
output = u"\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
#!/usr/bin/env python3
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = [x for x in raw if x[2] == 'Lu' or x[2] == 'Ll']
image = [chr(int(c[0], 16)) for c in characters]
output = "\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
Migrate script to Python 3
|
Migrate script to Python 3
|
Python
|
apache-2.0
|
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
|
#! /usr/bin/env python
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = filter(lambda x: x[2] == 'Lu' or x[2] == 'Ll', raw)
image = [unichr(int(c[0], 16)) for c in characters]
output = u"\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
Migrate script to Python 3
|
#!/usr/bin/env python3
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = [x for x in raw if x[2] == 'Lu' or x[2] == 'Ll']
image = [chr(int(c[0], 16)) for c in characters]
output = "\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
<commit_before>#! /usr/bin/env python
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = filter(lambda x: x[2] == 'Lu' or x[2] == 'Ll', raw)
image = [unichr(int(c[0], 16)) for c in characters]
output = u"\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
<commit_msg>Migrate script to Python 3<commit_after>
|
#!/usr/bin/env python3
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = [x for x in raw if x[2] == 'Lu' or x[2] == 'Ll']
image = [chr(int(c[0], 16)) for c in characters]
output = "\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
#! /usr/bin/env python
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = filter(lambda x: x[2] == 'Lu' or x[2] == 'Ll', raw)
image = [unichr(int(c[0], 16)) for c in characters]
output = u"\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
Migrate script to Python 3#!/usr/bin/env python3
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = [x for x in raw if x[2] == 'Lu' or x[2] == 'Ll']
image = [chr(int(c[0], 16)) for c in characters]
output = "\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
<commit_before>#! /usr/bin/env python
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = filter(lambda x: x[2] == 'Lu' or x[2] == 'Ll', raw)
image = [unichr(int(c[0], 16)) for c in characters]
output = u"\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
<commit_msg>Migrate script to Python 3<commit_after>#!/usr/bin/env python3
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
# This program reads a Unicode database and emits all letters in lower
# and upper case.
# Refer to http://www.unicode.org/ucd/ to download new files.
import sys
def add_character(unicodespec, characterstore):
characterstora
def main(raw, out):
# Fetch upper and lower case characters in Unicode
characters = [x for x in raw if x[2] == 'Lu' or x[2] == 'Ll']
image = [chr(int(c[0], 16)) for c in characters]
output = "\n".join(image)
out.write(output.encode("UTF-8"))
out.write(u"\n".encode("UTF-8"))
if __name__ == '__main__':
try:
raw = [x.split(";") for x in open("./UnicodeData.txt", "r").readlines()]
except:
sys.stderr.write("Problems reading ./UnicodeData.txt.\n")
sys.exit(1)
main(raw, sys.stdout)
|
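The Python 2/3 equivalences the migration relies on, shown standalone (note, as an aside, that writing encoded bytes would need sys.stdout.buffer rather than sys.stdout on Python 3; that detail is outside this commit):
code_points = ['0041', '0061', '002C']
# Py2's unichr() became plain chr() in Py3; both yield a text character.
image = [chr(int(c, 16)) for c in code_points]
assert image == ['A', 'a', ',']
# Py2's filter() returned a list; Py3's is lazy, so a list comprehension
# keeps the eager, reusable sequence the script expects.
rows = [['0041', 'LATIN CAPITAL LETTER A', 'Lu'], ['002C', 'COMMA', 'Po']]
letters = [x for x in rows if x[2] in ('Lu', 'Ll')]
assert letters == [rows[0]]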
cc12a902b772e057141da72c5bffeb678bc37df6
|
tvrenamr/tests/base.py
|
tvrenamr/tests/base.py
|
from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
|
from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.config.defaults['renamed'] = self.files
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
|
Set the rename directory to the test files directory
|
Set the rename directory to the test files directory
|
Python
|
mit
|
ghickman/tvrenamr,wintersandroid/tvrenamr
|
from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
Set the rename directory to the test files directory
|
from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.config.defaults['renamed'] = self.files
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
|
<commit_before>from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
<commit_msg>Set the rename directory to the test files directory<commit_after>
|
from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.config.defaults['renamed'] = self.files
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
|
from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
Set the rename directory to the test files directoryfrom os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.config.defaults['renamed'] = self.files
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
|
<commit_before>from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
<commit_msg>Set the rename directory to the test files directory<commit_after>from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import mock_requests
# make pyflakes STFU
assert mock_requests
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
def join_path(path):
return join(self.path, path)
self.files = join_path('files')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.config.defaults['renamed'] = self.files
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
|
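The same sandboxing idea expressed with pytest's tmp_path fixture; the rename helper is hypothetical and only illustrates why the renamed directory should point at the per-test files directory:
def rename(src, renamed_dir):
    # Hypothetical stand-in: move the file into the configured output dir.
    dst = renamed_dir / ('renamed_' + src.name)
    src.rename(dst)
    return dst
def test_output_stays_in_sandbox(tmp_path):
    src = tmp_path / 'show.s01e01.avi'
    src.write_text('')
    dst = rename(src, tmp_path)  # renamed dir == the test files dir
    assert dst.exists() and dst.parent == tmp_path
    # tmp_path is removed automatically, mirroring the rmtree in teardown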
77d72fe0502c64294dbacdbf8defbb44ee21c088
|
schools/admin.py
|
schools/admin.py
|
from django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
|
from django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
search_fields = ['school__names__types__value']
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
|
Add search based on school name
|
Add search based on school name
|
Python
|
agpl-3.0
|
City-of-Helsinki/kore,City-of-Helsinki/kore,Rikuoja/kore,Rikuoja/kore
|
from django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
Add search based on school name
|
from django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
search_fields = ['school__names__types__value']
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
|
<commit_before>from django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
<commit_msg>Add search based on school name<commit_after>
|
from django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
search_fields = ['school__names__types__value']
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
|
from django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
Add search based on school namefrom django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
search_fields = ['school__names__types__value']
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
|
<commit_before>from django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
<commit_msg>Add search based on school name<commit_after>from django.contrib import admin
from .models import *
class SchoolBuildingPhotoInline(admin.TabularInline):
model = SchoolBuildingPhoto
@admin.register(SchoolBuilding)
class SchoolBuildingAdmin(admin.ModelAdmin):
fields = ('school', 'building', 'begin_year', 'end_year')
readonly_fields = fields
search_fields = ['school__names__types__value']
list_display = ('__str__', 'has_photo')
list_filter = ('photos',)
inlines = [SchoolBuildingPhotoInline]
|
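How the double-underscore path in search_fields traverses relations, modelled on plain objects (the intermediate names are inferred from the lookup, not kore's real schema):
def follow(obj, path):
    # Toy analogue of Django resolving 'school__names__types__value':
    # each '__' segment hops across one relation or attribute.
    for part in path.split('__'):
        obj = getattr(obj, part)
    return obj
class Node:
    def __init__(self, **attrs):
        self.__dict__.update(attrs)
building = Node(school=Node(names=Node(types=Node(value='Aleksis Kiven koulu'))))
assert follow(building, 'school__names__types__value') == 'Aleksis Kiven koulu'
# In the admin this becomes roughly:
#   queryset.filter(school__names__types__value__icontains=<search term>)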
8b628ce91040736e1cb33a544871925608c70479
|
penchy/jobs/dependency.py
|
penchy/jobs/dependency.py
|
"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
|
"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
from penchy.util import topological_sort
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
def edgesort(edges):
"""
Return the topologically sorted elements of ``edges``.
:param edges: Sequence of :class:`Edge`
:returns: topologically sorted :class:`PipelineElement`
"""
starts = set(edge.source for edge in edges)
deps = []
edges = list(edges)
while edges:
target = edges[0].sink
starts.discard(target)
sources = [edge.source for edge in edges if edge.sink is target]
deps.append((sources if sources else None, target))
edges = [edge for edge in edges if edge.sink is not target]
deps.extend((None, start) for start in starts)
return topological_sort(deps)
|
Add edgesort as a frontend to topological_sort.
|
Add edgesort as a frontend to topological_sort.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com>
|
Python
|
mit
|
fhirschmann/penchy,fhirschmann/penchy
|
"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
Add edgesort as a frontend to topological_sort.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com>
|
"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
from penchy.util import topological_sort
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
def edgesort(edges):
"""
Return the topologically sorted elements of ``edges``.
:param edges: Sequence of :class:`Edge`
:returns: topologically sorted :class:`PipelineElement`
"""
starts = set(edge.source for edge in edges)
deps = []
edges = list(edges)
while edges:
target = edges[0].sink
starts.discard(target)
sources = [edge.source for edge in edges if edge.sink is target]
deps.append((sources if sources else None, target))
edges = [edge for edge in edges if edge.sink is not target]
deps.extend((None, start) for start in starts)
return topological_sort(deps)
|
<commit_before>"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
<commit_msg>Add edgesort as a frontend to topological_sort.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com><commit_after>
|
"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
from penchy.util import topological_sort
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
def edgesort(edges):
"""
Return the topologically sorted elements of ``edges``.
:param edges: Sequence of :class:`Edge`
:returns: topologically sorted :class:`PipelineElement`
"""
starts = set(edge.source for edge in edges)
deps = []
edges = list(edges)
while edges:
target = edges[0].sink
starts.discard(target)
sources = [edge.source for edge in edges if edge.sink is target]
deps.append((sources if sources else None, target))
edges = [edge for edge in edges if edge.sink is not target]
deps.extend((None, start) for start in starts)
return topological_sort(deps)
|
"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
Add edgesort as a frontend to topological_sort.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com>"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
from penchy.util import topological_sort
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
def edgesort(edges):
"""
Return the topologically sorted elements of ``edges``.
:param edges: Sequence of :class:`Edge`
:returns: topologically sorted :class:`PipelineElement`
"""
starts = set(edge.source for edge in edges)
deps = []
edges = list(edges)
while edges:
target = edges[0].sink
starts.discard(target)
sources = [edge.source for edge in edges if edge.sink is target]
deps.append((sources if sources else None, target))
edges = [edge for edge in edges if edge.sink is not target]
deps.extend((None, start) for start in starts)
return topological_sort(deps)
|
<commit_before>"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
<commit_msg>Add edgesort as a frontend to topological_sort.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com><commit_after>"""
This module provides the parts to model and resolve dependencies in the flow of
execution.
"""
from penchy.util import topological_sort
class Edge(object):
"""
This class represents edges in the dependency graph.
"""
def __init__(self, source, sink=None, map=None):
"""
:param source: source of data
:param sink: sink of data
:param map: sequence of name pairs that map source exits to sink
entrances
"""
self.source = source
self.sink = sink
self.map = map
def edgesort(edges):
"""
Return the topologically sorted elements of ``edges``.
:param edges: Sequence of :class:`Edge`
:returns: topologically sorted :class:`PipelineElement`
"""
starts = set(edge.source for edge in edges)
deps = []
edges = list(edges)
while edges:
target = edges[0].sink
starts.discard(target)
sources = [edge.source for edge in edges if edge.sink is target]
deps.append((sources if sources else None, target))
edges = [edge for edge in edges if edge.sink is not target]
deps.extend((None, start) for start in starts)
return topological_sort(deps)
|
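What edgesort builds before delegating: for a small read -> parse -> {plot, stats} pipeline the dependency pairs and a valid ordering look like this (the topological_sort below is a stand-in with the same assumed (sources-or-None, target) contract, since penchy.util's version is not shown here):
def topological_sort(deps):
    order, placed, deps = [], set(), list(deps)
    while deps:
        for i, (sources, target) in enumerate(deps):
            if not sources or all(s in placed for s in sources):
                order.append(target)
                placed.add(target)
                deps.pop(i)
                break
        else:
            raise ValueError('cycle detected')
    return order
deps = [(['read'], 'parse'), (['parse'], 'plot'),
        (['parse'], 'stats'), (None, 'read')]
assert topological_sort(deps) == ['read', 'parse', 'plot', 'stats']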
c07b6d2abae4ccd1eacb846a947945ecd6e963af
|
photutils/utils/_round.py
|
photutils/utils/_round.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = np.asscalar(value)
return value
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = value.item()
return value
|
Replace deprecated np.asscalar(a) with a.item()
|
Replace deprecated np.asscalar(a) with a.item()
|
Python
|
bsd-3-clause
|
larrybradley/photutils,astropy/photutils
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = np.asscalar(value)
return value
Replace deprecated np.asscalar(a) with a.item()
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = value.item()
return value
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = np.asscalar(value)
return value
<commit_msg>Replace deprecated np.asscalar(a) with a.item()<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = value.item()
return value
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = np.asscalar(value)
return value
Replace deprecated np.asscalar(a) with a.item()# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = value.item()
return value
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = np.asscalar(value)
return value
<commit_msg>Replace deprecated np.asscalar(a) with a.item()<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides tools to round numpy arrays.
"""
import numpy as np
def _py2intround(a):
"""
Round the input to the nearest integer.
If two integers are equally close, rounding is done away from 0.
"""
data = np.asanyarray(a)
value = np.where(data >= 0, np.floor(data + 0.5),
np.ceil(data - 0.5)).astype(int)
if not hasattr(a, '__iter__'):
value = value.item()
return value
|
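Two details worth seeing side by side: NumPy's default ties-to-even rounding versus the away-from-zero behaviour implemented above, and the .item() replacement for np.asscalar (deprecated in NumPy 1.16, removed in 1.23):
import numpy as np
print(np.round([0.5, 1.5, 2.5]))  # [0. 2. 2.] -- banker's rounding, ties to even
data = np.asarray([0.5, 1.5, 2.5, -0.5])
away = np.where(data >= 0, np.floor(data + 0.5),
                np.ceil(data - 0.5)).astype(int)
print(away)                       # [ 1  2  3 -1] -- ties away from zero
# .item() turns a 0-d array into a plain Python scalar:
assert np.asarray(2).item() == 2 and isinstance(np.asarray(2).item(), int)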
445bd6d2b5f68da6d51d9acb84b1e15e6b4af2d8
|
k8s/models/common.py
|
k8s/models/common.py
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = RequiredField(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = Field(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
generateName = Field(six.text_type)
|
Add support for auto-generated names in metadata
|
Add support for auto-generated names in metadata
|
Python
|
apache-2.0
|
fiaas/k8s
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = RequiredField(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
Add support for auto-generated names in metadata
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = Field(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
generateName = Field(six.text_type)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = RequiredField(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
<commit_msg>Add support for auto-generated names in metadata<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = Field(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
generateName = Field(six.text_type)
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = RequiredField(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
Add support for auto-generated names in metadata#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = Field(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
generateName = Field(six.text_type)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = RequiredField(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
<commit_msg>Add support for auto-generated names in metadata<commit_after>#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, RequiredField
class ObjectMeta(Model):
name = Field(six.text_type)
namespace = Field(six.text_type, "default")
resourceVersion = ReadOnlyField(six.text_type)
labels = Field(dict)
annotations = Field(dict)
generateName = Field(six.text_type)
|
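The two naming modes this field enables, sketched below; the keyword-argument constructor is assumed from the field declarations, and the unique suffix is produced by the Kubernetes API server, not by this client model:
from k8s.models.common import ObjectMeta
fixed = ObjectMeta(name='my-job', namespace='default')
auto = ObjectMeta(generateName='my-job-', namespace='default')
# POSTing `auto` lets the API server append a random suffix, e.g.
# "my-job-x7f2q"; this is why `name` could no longer be a RequiredField.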
9bd2d607e52b50ae79ff51199118395e57cedfdc
|
custom/icds/tests/test_views.py
|
custom/icds/tests/test_views.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login_old_format(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
@override_settings(CUSTOM_LANDING_TEMPLATE={"default": 'icds/login.html'})
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
|
Add test for new custom landing format
|
Add test for new custom landing format
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
Add test for new custom landing format
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login_old_format(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
@override_settings(CUSTOM_LANDING_TEMPLATE={"default": 'icds/login.html'})
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
|
<commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
<commit_msg>Add test for new custom landing format<commit_after>
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login_old_format(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
@override_settings(CUSTOM_LANDING_TEMPLATE={"default": 'icds/login.html'})
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
|
from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
Add test for new custom landing formatfrom __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login_old_format(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
@override_settings(CUSTOM_LANDING_TEMPLATE={"default": 'icds/login.html'})
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
|
<commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
<commit_msg>Add test for new custom landing format<commit_after>from __future__ import absolute_import
from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
class TestViews(TestCase):
@override_settings(CUSTOM_LANDING_TEMPLATE='icds/login.html')
def test_custom_login_old_format(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
@override_settings(CUSTOM_LANDING_TEMPLATE={"default": 'icds/login.html'})
def test_custom_login(self):
response = self.client.get(reverse("login"), follow=False)
self.assertEqual(response.status_code, 200)
|
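The diff above implies that CUSTOM_LANDING_TEMPLATE now accepts either a bare template path (old format) or a dict with a "default" key (new format). A minimal sketch of how such a dual-format setting might be resolved; the helper name and the domain-keyed lookup are assumptions for illustration, not taken from the record:

# Hypothetical resolver for a setting that may be a plain path or a dict.
def resolve_landing_template(setting, domain="default"):
    if isinstance(setting, dict):
        # Fall back to the "default" entry when the domain has none.
        return setting.get(domain, setting.get("default"))
    return setting  # old-style plain template path

assert resolve_landing_template('icds/login.html') == 'icds/login.html'
assert resolve_landing_template({"default": 'icds/login.html'}) == 'icds/login.html'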
03a54ea1920a3716e9c8d326f5c4c408f45b7d08
|
apps/api/urls.py
|
apps/api/urls.py
|
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.obtain_jwt_token),
url(r'^token/refresh/$', jwt_views.refresh_jwt_token),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
|
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.ObtainJSONWebToken.as_view()),
url(r'^token/refresh/$', jwt_views.RefreshJSONWebToken.as_view()),
url(r'^token/verify/$', jwt_views.VerifyJSONWebToken.as_view()),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
|
Add option to verify jwt token
|
Add option to verify jwt token
|
Python
|
bsd-3-clause
|
lucifurtun/myquotes,lucifurtun/myquotes,lucifurtun/myquotes,lucifurtun/myquotes
|
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.obtain_jwt_token),
url(r'^token/refresh/$', jwt_views.refresh_jwt_token),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
Add option to verify jwt token
|
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.ObtainJSONWebToken.as_view()),
url(r'^token/refresh/$', jwt_views.RefreshJSONWebToken.as_view()),
url(r'^token/verify/$', jwt_views.VerifyJSONWebToken.as_view()),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
|
<commit_before>from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.obtain_jwt_token),
url(r'^token/refresh/$', jwt_views.refresh_jwt_token),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
<commit_msg>Add option to verify jwt token<commit_after>
|
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.ObtainJSONWebToken.as_view()),
url(r'^token/refresh/$', jwt_views.RefreshJSONWebToken.as_view()),
url(r'^token/verify/$', jwt_views.VerifyJSONWebToken.as_view()),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
|
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.obtain_jwt_token),
url(r'^token/refresh/$', jwt_views.refresh_jwt_token),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
Add option to verify jwt tokenfrom django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.ObtainJSONWebToken.as_view()),
url(r'^token/refresh/$', jwt_views.RefreshJSONWebToken.as_view()),
url(r'^token/verify/$', jwt_views.VerifyJSONWebToken.as_view()),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
|
<commit_before>from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.obtain_jwt_token),
url(r'^token/refresh/$', jwt_views.refresh_jwt_token),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
<commit_msg>Add option to verify jwt token<commit_after>from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.ObtainJSONWebToken.as_view()),
url(r'^token/refresh/$', jwt_views.RefreshJSONWebToken.as_view()),
url(r'^token/verify/$', jwt_views.VerifyJSONWebToken.as_view()),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
|
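In django-rest-framework-jwt the module-level names obtain_jwt_token and refresh_jwt_token are simply the pre-built .as_view() results of ObtainJSONWebToken and RefreshJSONWebToken, so the rewrite above is behavior-preserving apart from the added verify route. A rough client-side sketch of exercising the new endpoint; the host and credentials are placeholders:

import requests  # third-party HTTP client, assumed installed

BASE = "http://localhost:8000/api"  # placeholder host

# Obtain a token, then confirm it through the new verify endpoint.
token = requests.post(BASE + "/token/new/",
                      data={"username": "alice", "password": "secret"}).json()["token"]
resp = requests.post(BASE + "/token/verify/", data={"token": token})
print(resp.status_code)  # expected 200 while the token is valid and unexpired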
7626f955a799cf93bb66aaa9b79a33395e9871e6
|
api/api_resource.py
|
api/api_resource.py
|
from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {
"separators": (',', ':'),
"ensure_ascii": False,
}
if pretty:
json_kwargs["indent"] = 4
json_kwargs["separators"] = (', ', ': ')
response.body = json.dumps(tagged_json, **json_kwargs)
|
from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {"separators": (',', ':')}
if pretty:
json_kwargs = {"indent": 4, "separators": (', ', ': ')}
response.body = json.dumps(tagged_json, **json_kwargs)
|
Revert "Return utf-8, not ascii."
|
Revert "Return utf-8, not ascii."
This reverts commit 86cbefc74471e4c991c96e0385b931a2a20f5d50.
Former-commit-id: 3246e0bfefb806bd2b4d3dda0cb77e91f3481971
|
Python
|
mit
|
EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger
|
from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {
"separators": (',', ':'),
"ensure_ascii": False,
}
if pretty:
json_kwargs["indent"] = 4
json_kwargs["separators"] = (', ', ': ')
response.body = json.dumps(tagged_json, **json_kwargs)
Revert "Return utf-8, not ascii."
This reverts commit 86cbefc74471e4c991c96e0385b931a2a20f5d50.
Former-commit-id: 3246e0bfefb806bd2b4d3dda0cb77e91f3481971
|
from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {"separators": (',', ':')}
if pretty:
json_kwargs = {"indent": 4, "separators": (', ', ': ')}
response.body = json.dumps(tagged_json, **json_kwargs)
|
<commit_before>from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {
"separators": (',', ':'),
"ensure_ascii": False,
}
if pretty:
json_kwargs["indent"] = 4
json_kwargs["separators"] = (', ', ': ')
response.body = json.dumps(tagged_json, **json_kwargs)
<commit_msg>Revert "Return utf-8, not ascii."
This reverts commit 86cbefc74471e4c991c96e0385b931a2a20f5d50.
Former-commit-id: 3246e0bfefb806bd2b4d3dda0cb77e91f3481971<commit_after>
|
from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {"separators": (',', ':')}
if pretty:
json_kwargs = {"indent": 4, "separators": (', ', ': ')}
response.body = json.dumps(tagged_json, **json_kwargs)
|
from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {
"separators": (',', ':'),
"ensure_ascii": False,
}
if pretty:
json_kwargs["indent"] = 4
json_kwargs["separators"] = (', ', ': ')
response.body = json.dumps(tagged_json, **json_kwargs)
Revert "Return utf-8, not ascii."
This reverts commit 86cbefc74471e4c991c96e0385b931a2a20f5d50.
Former-commit-id: 3246e0bfefb806bd2b4d3dda0cb77e91f3481971from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {"separators": (',', ':')}
if pretty:
json_kwargs = {"indent": 4, "separators": (', ', ': ')}
response.body = json.dumps(tagged_json, **json_kwargs)
|
<commit_before>from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {
"separators": (',', ':'),
"ensure_ascii": False,
}
if pretty:
json_kwargs["indent"] = 4
json_kwargs["separators"] = (', ', ': ')
response.body = json.dumps(tagged_json, **json_kwargs)
<commit_msg>Revert "Return utf-8, not ascii."
This reverts commit 86cbefc74471e4c991c96e0385b931a2a20f5d50.
Former-commit-id: 3246e0bfefb806bd2b4d3dda0cb77e91f3481971<commit_after>from falcon.util.uri import parse_query_string
import json
from api.actions import pos_tagging
class ApiResource(object):
def parse_request_data(self, raw_post_data):
encoded_raw_post_data = ""
try:
encoded_raw_post_data = str(raw_post_data, 'utf-8')
except UnicodeDecodeError:
try:
encoded_raw_post_data = str(raw_post_data, 'latin-1')
except UnicodeDecodeError:
pass
return encoded_raw_post_data
def on_post(self, request, response):
body = request.stream.read()
encoded_raw_post_data = self.parse_request_data(body)
pretty = request.get_param("pretty")
if not pretty:
pretty = parse_query_string(encoded_raw_post_data).get("pretty", False)
data = request.get_param("data")
if not data:
data = parse_query_string(encoded_raw_post_data).get("data", False)
if not data:
data = encoded_raw_post_data
if not data:
return {"error": "No data posted or data incorrectly encoded"}
tagged_json = pos_tagging(data)
json_kwargs = {"separators": (',', ':')}
if pretty:
json_kwargs = {"indent": 4, "separators": (', ', ': ')}
response.body = json.dumps(tagged_json, **json_kwargs)
|
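The functional effect of this revert is dropping ensure_ascii=False, so json.dumps returns to its default of escaping non-ASCII characters as \uXXXX sequences instead of emitting raw UTF-8. A quick demonstration of the difference:

import json

data = {"word": "fåtölj"}
print(json.dumps(data))                      # {"word": "f\u00e5t\u00f6lj"}  (default, ASCII-safe)
print(json.dumps(data, ensure_ascii=False))  # {"word": "fåtölj"}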
ed0b33dc0866100c2bede3579711af761d5cb159
|
plumeria/util/__init__.py
|
plumeria/util/__init__.py
|
MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
|
MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
'.txt': 'text/plain',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
else:
return "application/octet-stream"
|
Fix mimetype detection to handle more types.
|
Fix mimetype detection to handle more types.
|
Python
|
mit
|
sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria
|
MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
Fix mimetype detection to handle more types.
|
MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
'.txt': 'text/plain',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
else:
return "application/octet-stream"
|
<commit_before>MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
<commit_msg>Fix mimetype detection to handle more types.<commit_after>
|
MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
'.txt': 'text/plain',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
else:
return "application/octet-stream"
|
MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
Fix mimetype detection to handle more types.MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
'.txt': 'text/plain',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
else:
return "application/octet-stream"
|
<commit_before>MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
<commit_msg>Fix mimetype detection to handle more types.<commit_after>MIME_TYPES = {
'.png': 'image/png',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
'.txt': 'text/plain',
}
def to_mimetype(ext):
if ext.lower() in MIME_TYPES:
return MIME_TYPES[ext.lower()]
else:
return "application/octet-stream"
|
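For extensions outside the hand-written table, the standard library's mimetypes module provides the same lookup against a much larger database. A sketch of an equivalent helper built on it (an alternative approach, not what the commit does):

import mimetypes

def to_mimetype(ext):
    # guess_type expects a filename, so prefix a dummy name; it returns
    # (type, encoding), with type=None for unknown extensions.
    guessed, _ = mimetypes.guess_type("file" + ext.lower())
    return guessed or "application/octet-stream"

assert to_mimetype(".PNG") == "image/png"
assert to_mimetype(".xyz") == "application/octet-stream"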
2b29ad50beb04f2212ccd5c4dd3c769ac157ce02
|
urls.py
|
urls.py
|
from django.conf.urls.defaults import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
|
from django.conf.urls import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
|
Fix import path for url utilities.
|
Fix import path for url utilities.
|
Python
|
bsd-3-clause
|
unt-libraries/django-invite,unt-libraries/django-invite
|
from django.conf.urls.defaults import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
Fix import path for url utilities.
|
from django.conf.urls import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
|
<commit_before>from django.conf.urls.defaults import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
<commit_msg>Fix import path for url utilities.<commit_after>
|
from django.conf.urls import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
|
from django.conf.urls.defaults import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
Fix import path for url utilities.from django.conf.urls import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
|
<commit_before>from django.conf.urls.defaults import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
<commit_msg>Fix import path for url utilities.<commit_after>from django.conf.urls import url, patterns
urlpatterns = patterns('',
url(r'^$', 'invite.views.index', name='index'),
url(r'^invite/$', 'invite.views.invite', name='invite'),
url(r'^resend/(?P<code>.*)/$', 'invite.views.resend', name='resend'),
url(r'^revoke/(?P<code>.*)/$', 'invite.views.revoke', name='revoke'),
url(r'^login/$', 'invite.views.log_in_user', name='login'),
url(r'^logout/$', 'invite.views.log_out_user', name='edit_logout'),
url(r'^amnesia/$', 'invite.views.amnesia', name='amnesia'),
url(r'^reset/$', 'invite.views.reset', name="reset"),
url(r'^signup/$', 'invite.views.signup', name="account_signup"),
url(r'^about/$', 'invite.views.about', name="about"),
)
|
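The import fix reflects that django.conf.urls.defaults was removed in Django 1.6. Worth noting that patterns() itself was later removed in Django 1.10, where urlpatterns becomes a plain list of url() entries referencing view callables; a sketch of that later style for the same routes, abbreviated:

# Django >= 1.10 style: no patterns(), views referenced directly.
from django.conf.urls import url
from invite import views

urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^invite/$', views.invite, name='invite'),
    # ... remaining routes as above ...
]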
0b1c174808ddebee4c41bb423e05d75118830c1d
|
src/analyses/report_urls.py
|
src/analyses/report_urls.py
|
from django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
|
from django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:-]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
|
Fix some base64 auth keys are not captured by url
|
Fix some base64 auth keys are not captured by url
|
Python
|
mit
|
ccwang002/biocloud-server-kai,ccwang002/biocloud-server-kai,ccwang002/biocloud-server-kai
|
from django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
Fix some base64 auth keys are not captured by url
|
from django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:-]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
|
<commit_before>from django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
<commit_msg>Fix some base64 auth keys are not captured by url<commit_after>
|
from django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:-]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
|
from django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
Fix some base64 auth keys are not captured by urlfrom django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:-]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
|
<commit_before>from django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
<commit_msg>Fix some base64 auth keys are not captured by url<commit_after>from django.conf.urls import url
from .views import serve_report
urlpatterns = [
url(
r'^view/(?P<auth_key>[\w:-]+)/(?P<file_path>.*)',
serve_report,
name='serve_report'
),
]
|
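The one-character change widens the auth_key character class to accept '-', which URL-safe base64 uses alongside '_' (the underscore was already covered by \w). A quick check of both patterns:

import re

old = re.compile(r"^view/(?P<auth_key>[\w:]+)/(?P<file_path>.*)")
new = re.compile(r"^view/(?P<auth_key>[\w:-]+)/(?P<file_path>.*)")

url = "view/abc-123:xyz/report/index.html"
print(old.match(url))                    # None: the '-' breaks the old pattern
print(new.match(url).group("auth_key"))  # 'abc-123:xyz'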
ce8c79346d3c7978739ce2c0a05f89a48150fa2f
|
ratechecker/migrations/0002_remove_fee_loader.py
|
ratechecker/migrations/0002_remove_fee_loader.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
Remove IF EXISTS from fix_fee_product_index
|
Remove IF EXISTS from fix_fee_product_index
|
Python
|
cc0-1.0
|
cfpb/owning-a-home-api
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
Remove IF EXISTS from fix_fee_product_index
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
<commit_msg>Remove IF EXISTS from fix_fee_product_index<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
Remove IF EXISTS from fix_fee_product_index# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
)
except (ProgrammingError, OperationalError):
pass
try:
schema_editor.execute(
'ALTER TABLE IF EXISTS cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
<commit_msg>Remove IF EXISTS from fix_fee_product_index<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
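One side effect of the rewrite is that the three statements now travel in a single execute() call, so if the unguarded DROP INDEX fails, the two ALTER statements are skipped along with it. Separately, RunPython without a reverse function makes the migration irreversible; the usual Django idiom, if reversibility matters, is an explicit no-op reverse (a general pattern, not something this commit adds):

from django.db import migrations

def fix_fee_product_index(apps, schema_editor):
    ...  # forward logic as in the migration above

class Migration(migrations.Migration):
    dependencies = [('ratechecker', '0001_initial')]
    operations = [
        # noop reverse lets `migrate` roll back past this without undoing anything
        migrations.RunPython(fix_fee_product_index,
                             reverse_code=migrations.RunPython.noop),
    ]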
70daf4157cc8b039d726fd6482fa9bae1b3fee1e
|
modules/urlparser/__init__.py
|
modules/urlparser/__init__.py
|
from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg.encode('utf-8'))
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
|
from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg)
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
|
Change encoding of URL parser
|
Change encoding of URL parser
|
Python
|
mit
|
billyvg/piebot
|
from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg.encode('utf-8'))
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
Change encoding of URL parser
|
from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg)
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
|
<commit_before>from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg.encode('utf-8'))
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
<commit_msg>Change encoding of URL parser<commit_after>
|
from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg)
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
|
from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg.encode('utf-8'))
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
Change encoding of URL parserfrom modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg)
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
|
<commit_before>from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg.encode('utf-8'))
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
<commit_msg>Change encoding of URL parser<commit_after>from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg)
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
|
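Under Python 2, which this module targets given its print statements, .encode('utf-8') is only safe on unicode objects; calling it on a byte string containing non-ASCII data triggers an implicit ASCII decode first and raises UnicodeDecodeError. That is a plausible motivation for dropping the call when handlers already return byte strings, though the record itself does not say. A Python 2 illustration:

# Python 2 semantics:
u"f\xe5t\xf6lj".encode("utf-8")         # fine: unicode -> UTF-8 bytes
"f\xc3\xa5t\xc3\xb6lj".encode("utf-8")  # raises UnicodeDecodeError (implicit ascii decode)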
f59106c4c804b0d0bc04dec9ff28b1b9c4ff08e4
|
GeneratePassword/generate_password_v3.py
|
GeneratePassword/generate_password_v3.py
|
import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(w.group() for w in re.finditer(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
word_length = (password_length + 1) // number_of_words
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
|
import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(re.findall(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
quotient, remainder = divmod(password_length, number_of_words)
word_length = quotient + (1 if remainder else 0)
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
|
Use a correct algorithm to determine the length of word to use
|
Use a correct algorithm to determine the length of word to use
|
Python
|
apache-2.0
|
OneScreenfulOfPython/screenfuls,OneScreenfulOfPython/screenfuls
|
import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(w.group() for w in re.finditer(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
word_length = (password_length + 1) // number_of_words
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
Use a correct algorithm to determine the length of word to use
|
import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(re.findall(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
quotient, remainder = divmod(password_length, number_of_words)
word_length = quotient + (1 if remainder else 0)
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
|
<commit_before>import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(w.group() for w in re.finditer(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
word_length = (password_length + 1) // number_of_words
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
<commit_msg>Use a correct algorithm to determine the length of word to use<commit_after>
|
import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(re.findall(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
quotient, remainder = divmod(password_length, number_of_words)
word_length = quotient + (1 if remainder else 0)
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
|
import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(w.group() for w in re.finditer(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
word_length = (password_length + 1) // number_of_words
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
Use a correct algorithm to determine the length of word to use
import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(re.findall(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
quotient, remainder = divmod(password_length, number_of_words)
word_length = quotient + (1 if remainder else 0)
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
|
<commit_before>import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(w.group() for w in re.finditer(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
word_length = (password_length + 1) // number_of_words
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
<commit_msg>Use a correct algorithm to determine the length of word to use<commit_after>import os, sys
import random
import re
try:
# Make Python2 work like Python3
input = raw_input
except NameError:
# On Python3; already using input
pass
def get_words_from_file(filepath):
"""Return the set of all words at least three letters
long from within a named file.
"""
with open(filepath) as f:
return set(re.findall(r"\w{3,}", f.read()))
def generate(filename, password_length, number_of_words):
"""Generate a password consisting of words from a text, at least
as long as password_length.
"""
words = get_words_from_file(filename)
quotient, remainder = divmod(password_length, number_of_words)
word_length = quotient + (1 if remainder else 0)
suitable_words = list(w for w in words if len(w) == word_length)
random.shuffle(suitable_words)
return "".join(w.title() for w in suitable_words[:number_of_words])
if __name__ == '__main__':
filename = input("Filename: ")
password_length = int(input("How many letters? "))
number_of_words = int(input("How many words? "))
password = generate(filename, password_length, number_of_words)
print("Your password is: {}".format(password))
|
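A quick check of the arithmetic this commit corrects: the goal is the ceiling of password_length / number_of_words, which the old (password_length + 1) // number_of_words only matches by accident, while the divmod form is exact.

def old_word_length(password_length, number_of_words):
    return (password_length + 1) // number_of_words      # not a ceiling

def new_word_length(password_length, number_of_words):
    quotient, remainder = divmod(password_length, number_of_words)
    return quotient + (1 if remainder else 0)            # true ceiling

assert old_word_length(12, 4) == new_word_length(12, 4) == 3  # exact division: both agree
assert new_word_length(13, 4) == 4    # 13 letters over 4 words needs 4-letter words
assert old_word_length(13, 4) == 3    # old formula falls one short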
7264db6b160d27f5b9eeb5571acad254f427ab7e
|
skan/__init__.py
|
skan/__init__.py
|
from .csr import skeleton_to_csgraph, branch_statistics, summarise
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
|
from .csr import skeleton_to_csgraph, branch_statistics, summarise, Skeleton
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
|
Add Skeleton class to package namespace
|
Add Skeleton class to package namespace
|
Python
|
bsd-3-clause
|
jni/skan
|
from .csr import skeleton_to_csgraph, branch_statistics, summarise
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
Add Skeleton class to package namespace
|
from .csr import skeleton_to_csgraph, branch_statistics, summarise, Skeleton
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
|
<commit_before>from .csr import skeleton_to_csgraph, branch_statistics, summarise
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
<commit_msg>Add Skeleton class to package namespace<commit_after>
|
from .csr import skeleton_to_csgraph, branch_statistics, summarise, Skeleton
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
|
from .csr import skeleton_to_csgraph, branch_statistics, summarise
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
Add Skeleton class to package namespace
from .csr import skeleton_to_csgraph, branch_statistics, summarise, Skeleton
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
|
<commit_before>from .csr import skeleton_to_csgraph, branch_statistics, summarise
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
<commit_msg>Add Skeleton class to package namespace<commit_after>from .csr import skeleton_to_csgraph, branch_statistics, summarise, Skeleton
__version__ = '0.8.0-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
|
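Note that the commit imports Skeleton into the package namespace but leaves __all__ unchanged, so skan.Skeleton works while `from skan import *` still omits it. A self-contained sketch of that semantics, using a synthetic module rather than skan itself:

import sys, types

pkg = types.ModuleType("pkg")
pkg.Skeleton = type("Skeleton", (), {})   # stands in for the imported class
pkg.summarise = lambda: None
pkg.__all__ = ["summarise"]               # Skeleton deliberately not listed
sys.modules["pkg"] = pkg

from pkg import *                         # star import honours __all__
assert "summarise" in dir() and "Skeleton" not in dir()

import pkg
assert pkg.Skeleton                       # attribute access still works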
4185a93dcb3e53b87280bfe0579d551c338c440f
|
datatableview/tests/testcase.py
|
datatableview/tests/testcase.py
|
# -*- encoding: utf-8 -*-
from django import get_version
from django.test import TestCase
from django.core.management import call_command
if get_version().split('.') >= ['1', '7']:
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
|
# -*- encoding: utf-8 -*-
import django
from django.test import TestCase
from django.core.management import call_command
if django.VERSION >= (1, 7):
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
|
Fix Django version check for 1.10
|
Fix Django version check for 1.10
|
Python
|
apache-2.0
|
pivotal-energy-solutions/django-datatable-view,pivotal-energy-solutions/django-datatable-view,jangeador/django-datatable-view,jangeador/django-datatable-view,doganmeh/django-datatable-view,jangeador/django-datatable-view,doganmeh/django-datatable-view,doganmeh/django-datatable-view,pivotal-energy-solutions/django-datatable-view
|
# -*- encoding: utf-8 -*-
from django import get_version
from django.test import TestCase
from django.core.management import call_command
if get_version().split('.') >= ['1', '7']:
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
Fix Django version check for 1.10
|
# -*- encoding: utf-8 -*-
import django
from django.test import TestCase
from django.core.management import call_command
if django.VERSION >= (1, 7):
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
|
<commit_before># -*- encoding: utf-8 -*-
from django import get_version
from django.test import TestCase
from django.core.management import call_command
if get_version().split('.') >= ['1', '7']:
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
<commit_msg>Fix Django version check for 1.10<commit_after>
|
# -*- encoding: utf-8 -*-
import django
from django.test import TestCase
from django.core.management import call_command
if django.VERSION >= (1, 7):
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
|
# -*- encoding: utf-8 -*-
from django import get_version
from django.test import TestCase
from django.core.management import call_command
if get_version().split('.') >= ['1', '7']:
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
Fix Django version check for 1.10
# -*- encoding: utf-8 -*-
import django
from django.test import TestCase
from django.core.management import call_command
if django.VERSION >= (1, 7):
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
|
<commit_before># -*- encoding: utf-8 -*-
from django import get_version
from django.test import TestCase
from django.core.management import call_command
if get_version().split('.') >= ['1', '7']:
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
<commit_msg>Fix Django version check for 1.10<commit_after># -*- encoding: utf-8 -*-
import django
from django.test import TestCase
from django.core.management import call_command
if django.VERSION >= (1, 7):
from django.test import override_settings
from django.apps import apps
initial_data_fixture = 'initial_data_modern'
clear_app_cache = apps.clear_cache
else:
from django.test.utils import override_settings
from django.db.models import loading
initial_data_fixture = 'initial_data_legacy'
def clear_app_cache():
loading.cache.loaded = False
@override_settings(INSTALLED_APPS=[
'datatableview',
'datatableview.tests.test_app',
'datatableview.tests.example_project.example_project.example_app',
])
class DatatableViewTestCase(TestCase):
def _pre_setup(self):
"""
Asks the management script to re-sync the database. Having test-only models is a pain.
"""
clear_app_cache()
call_command('syncdb', interactive=False, verbosity=0)
call_command('loaddata', initial_data_fixture, interactive=False, verbosity=0)
super(DatatableViewTestCase, self)._pre_setup()
|
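The bug this fixes is easy to reproduce: splitting a version string yields lists of strings, which compare lexicographically, so Django 1.10 sorts below 1.7. django.VERSION is already a numeric tuple (something like (1, 10, 0, 'final', 0)), which compares correctly:

assert "1.10.0".split(".") < ["1", "7"]   # wrong: "10" < "7" character by character
assert (1, 10, 0) >= (1, 7)               # right: integers compare numerically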
b5be0a7d48d51ef504f47711f53eb0d2f6238b70
|
SessionManager.py
|
SessionManager.py
|
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
settings.load()
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
|
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
def plugin_loaded():
settings.load()
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
|
Load settings only on "plugin_loaded"
|
Load settings only on "plugin_loaded"
Since sublime caches settings internally and subsequent "load_settings"
calls don't refer to the disk, it doesn't make sense to load the settings
each time a command gets executed.
|
Python
|
mit
|
Zeeker/sublime-SessionManager
|
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
settings.load()
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
Load settings only on "plugin_loaded"
Since sublime caches settings internally and subsequent "load_settings"
calls don't refer to the disk, it doesn't make sense to load the settings
each time a command gets executed.
|
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
def plugin_loaded():
settings.load()
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
|
<commit_before>import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
settings.load()
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
<commit_msg>Load settings only on "plugin_loaded"
Since sublime caches settings internally and subsequent "load_settings"
calls don't refer to the disk, it doesn't make sense to load the settings
each time a command gets executed.<commit_after>
|
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
def plugin_loaded():
settings.load()
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
|
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
settings.load()
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
Load settings only on "plugin_loaded"
Since sublime caches settings internally and subsequent "load_settings"
calls don't refer to the disk, it doesn't make sense to load the settings
each time a command gets executed.
import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
def plugin_loaded():
settings.load()
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
|
<commit_before>import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
settings.load()
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
<commit_msg>Load settings only on "plugin_loaded"
Since sublime caches settings internally and subsequent "load_settings"
calls don't refer to the disk, it doesn't make sense to load the settings
each time a command gets executed.<commit_after>import sublime
import sublime_plugin
from datetime import datetime
from .modules import messages
from .modules import serialize
from .modules import settings
from .modules.session import Session
def plugin_loaded():
settings.load()
class SaveSession(sublime_plugin.ApplicationCommand):
def run(self):
sublime.active_window().show_input_panel(
messages.dialog("session_name"),
self.generate_name(),
on_done=self.save_session,
on_change=None,
on_cancel=None
)
def generate_name(self):
now = datetime.now()
timestamp = now.strftime(settings.get('session_name_dateformat'))
return '_'.join(['session', timestamp])
def save_session(self, session_name):
session = Session.save(session_name, sublime.windows())
serialize.dump(session_name, session)
def is_enabled(self):
windows = sublime.windows()
for window in windows:
if is_saveable(window):
return True
return False
def is_saveable(window):
return bool(window.views()) or bool(window.project_data())
|
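A stand-alone sketch (no Sublime APIs, and the settings value here is a placeholder) of the lifecycle pattern the commit adopts: the host calls a module-level plugin_loaded() hook once, the one-time work happens there, and commands read the cached result instead of re-loading on every invocation.

_settings = None

def plugin_loaded():                      # invoked once by the host application
    global _settings
    _settings = {"session_name_dateformat": "%Y%m%d_%H%M%S"}  # pretend disk read

def run_command():
    return _settings["session_name_dateformat"]  # commands only read the cache

plugin_loaded()
assert run_command() == "%Y%m%d_%H%M%S"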
670827e34872b40635d567ff1f5a34f132719fa8
|
app/flask_config.py
|
app/flask_config.py
|
"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
|
"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'modern-paste-session'
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
|
Use more specific session cookie name
|
Use more specific session cookie name
|
Python
|
mit
|
LINKIWI/modern-paste,LINKIWI/modern-paste,LINKIWI/modern-paste
|
"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
Use more specific session cookie name
|
"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'modern-paste-session'
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
|
<commit_before>"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
<commit_msg>Use more specific session cookie name<commit_after>
|
"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'modern-paste-session'
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
|
"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
Use more specific session cookie name
"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'modern-paste-session'
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
|
<commit_before>"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
<commit_msg>Use more specific session cookie name<commit_after>"""
Config parameters for the Flask app itself.
Nothing here is user-configurable; all config variables you can set yourself are in config.py.
Generally speaking, don't touch this file unless you know what you're doing.
"""
import config
import constants
# Flask-SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME if config.BUILD_ENVIRONMENT == constants.build_environment.PROD else config.DATABASE_NAME + '_dev',
)
SQLALCHEMY_TEST_DATABASE_URI = 'mysql://{database_user}:{database_password}@{database_host}/{database_name}'.format(
database_user=config.DATABASE_USER,
database_password=config.DATABASE_PASSWORD,
database_host=config.DATABASE_HOST,
database_name=config.DATABASE_NAME + '_test',
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Flask session cookie name
SESSION_COOKIE_NAME = 'modern-paste-session'
# Flask session secret key
SECRET_KEY = config.FLASK_SECRET_KEY
|
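A minimal sketch of the new setting in effect (assumes Flask is installed; the app, secret key, and route are placeholders, not from the project). Every Flask app defaults its session cookie name to "session", so two apps on one host clobber each other's cookies; a distinct name avoids that.

from flask import Flask, session

app = Flask(__name__)
app.config["SESSION_COOKIE_NAME"] = "modern-paste-session"
app.secret_key = "placeholder"            # the real key comes from config.py

@app.route("/")
def index():
    session["seen"] = True                # forces a Set-Cookie on the response
    return "ok"

with app.test_client() as client:
    response = client.get("/")
    assert response.headers["Set-Cookie"].startswith("modern-paste-session=")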
118523251af8861d20b92ce754b48e9911f100c7
|
odsimport.py
|
odsimport.py
|
from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
db_row.append(db_value)
return db
|
from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.namespaces import TABLENS
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
try:
repeat_count = int(cell.getAttribute('numbercolumnsrepeated'))
except:
repeat_count = 1
if not cell.nextSibling:
repeat_count = 1
for i in range(repeat_count):
db_row.append(db_value)
return db
|
Fix ods-import for column repeat
|
Fix ods-import for column repeat
|
Python
|
bsd-2-clause
|
aholkner/PoliticalRPG,aholkner/PoliticalRPG
|
from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
db_row.append(db_value)
return db
Fix ods-import for column repeat
|
from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.namespaces import TABLENS
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
try:
repeat_count = int(cell.getAttribute('numbercolumnsrepeated'))
except:
repeat_count = 1
if not cell.nextSibling:
repeat_count = 1
for i in range(repeat_count):
db_row.append(db_value)
return db
|
<commit_before>from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
db_row.append(db_value)
return db
<commit_msg>Fix ods-import for column repeat<commit_after>
|
from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.namespaces import TABLENS
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
try:
repeat_count = int(cell.getAttribute('numbercolumnsrepeated'))
except:
repeat_count = 1
if not cell.nextSibling:
repeat_count = 1
for i in range(repeat_count):
db_row.append(db_value)
return db
|
from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
db_row.append(db_value)
return db
Fix ods-import for column repeat
from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.namespaces import TABLENS
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
try:
repeat_count = int(cell.getAttribute('numbercolumnsrepeated'))
except:
repeat_count = 1
if not cell.nextSibling:
repeat_count = 1
for i in range(repeat_count):
db_row.append(db_value)
return db
|
<commit_before>from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
db_row.append(db_value)
return db
<commit_msg>Fix ods-import for column repeat<commit_after>from odf.opendocument import load
from odf.table import Table, TableRow, TableCell
from odf.namespaces import TABLENS
from odf.text import P
def import_ods(path):
doc = load(path)
db = {}
tables = doc.spreadsheet.getElementsByType(Table)
for table in tables:
db_table = []
db[table.getAttribute('name')] = db_table
for row in table.getElementsByType(TableRow):
db_row = []
db_table.append(db_row)
for cell in row.getElementsByType(TableCell):
db_value = '\n'.join(map(str, cell.getElementsByType(P)))
try:
db_value = float(db_value)
except:
pass
try:
repeat_count = int(cell.getAttribute('numbercolumnsrepeated'))
except:
repeat_count = 1
if not cell.nextSibling:
repeat_count = 1
for i in range(repeat_count):
db_row.append(db_value)
return db
|
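The heart of the fix, isolated: ODS stores a run of identical cells once with a number-columns-repeated attribute, so a stored row must be expanded back to its logical cells, except that a trailing repeat merely pads the row to the sheet width and is collapsed to one cell (the `if not cell.nextSibling` guard). A self-contained sketch:

def expand_row(cells):
    """cells: list of (value, repeat_count, is_last_cell) triples."""
    out = []
    for value, repeat, is_last in cells:
        out.extend([value] * (1 if is_last else repeat))
    return out

assert expand_row([("A", 1, False), ("B", 3, False), ("C", 1, True)]) == ["A", "B", "B", "B", "C"]
assert expand_row([("A", 1, False), ("", 1018, True)]) == ["A", ""]  # huge trailing pad collapses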
52e5c8ebb74622d2cb2a378f98563cb615115a21
|
uptick/tools.py
|
uptick/tools.py
|
# -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
|
# -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
@tools.command()
@click.pass_context
@online
@click.option("--limit", default=10, type=int)
def getbrainkeys(ctx, limit):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import BrainKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["index", "wif", "pubkey", "accounts"]]
wif = BrainKey(password)
for i in range(limit):
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
i,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
next(wif)
print_table(t)
|
Allow to manually generate brainkeys
|
Allow to manually generate brainkeys
|
Python
|
mit
|
xeroc/uptick
|
# -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
Allow to manually generate brainkeys
|
# -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
@tools.command()
@click.pass_context
@online
@click.option("--limit", default=10, type=int)
def getbrainkeys(ctx, limit):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import BrainKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["index", "wif", "pubkey", "accounts"]]
wif = BrainKey(password)
for i in range(limit):
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
i,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
next(wif)
print_table(t)
|
<commit_before># -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
<commit_msg>Allow to manually generate brainkeys<commit_after>
|
# -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
@tools.command()
@click.pass_context
@online
@click.option("--limit", default=10, type=int)
def getbrainkeys(ctx, limit):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import BrainKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["index", "wif", "pubkey", "accounts"]]
wif = BrainKey(password)
for i in range(limit):
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
i,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
next(wif)
print_table(t)
|
# -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
Allow manual generation of brainkeys
# -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
@tools.command()
@click.pass_context
@online
@click.option("--limit", default=10, type=int)
def getbrainkeys(ctx, limit):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import BrainKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["index", "wif", "pubkey", "accounts"]]
wif = BrainKey(password)
for i in range(limit):
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
i,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
next(wif)
print_table(t)
|
<commit_before># -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
<commit_msg>Allow manual generation of brainkeys<commit_after># -*- coding: utf-8 -*-
import click
from .decorators import online
from .main import main, config
from .ui import print_table, print_tx
@main.group()
def tools():
""" Further tools
"""
pass
@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import PasswordKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["role", "wif", "pubkey", "accounts"]]
for role in ["owner", "active", "memo"]:
wif = PasswordKey(account, password, role=role)
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
role,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
print_table(t)
@tools.command()
@click.pass_context
@online
@click.option("--limit", default=10, type=int)
def getbrainkeys(ctx, limit):
""" Return keys for cloudlogin
"""
from bitsharesbase.account import BrainKey
password = click.prompt("Passphrase", hide_input=True).strip()
t = [["index", "wif", "pubkey", "accounts"]]
wif = BrainKey(password)
for i in range(limit):
pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])
t.append(
[
i,
str(wif.get_private_key()),
pubkey,
ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
]
)
next(wif)
print_table(t)
|
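For context, a minimal sketch of the derivation loop behind the new getbrainkeys command, using only the BrainKey calls that appear in the commit above. It assumes python-bitshares (bitsharesbase) is installed; derive_keys is an illustrative helper name, not part of uptick, and str() on the public key uses the library's default address prefix rather than the chain-specific prefix the CLI passes to format().

from bitsharesbase.account import BrainKey

def derive_keys(phrase, limit=10):
    """Yield (index, wif, pubkey) tuples for successive brainkey sequences."""
    key = BrainKey(phrase)
    for i in range(limit):
        # get_private_key()/get_public_key() return key objects; str() gives
        # the WIF and a default-prefix public key representation.
        yield i, str(key.get_private_key()), str(key.get_public_key())
        next(key)  # advance to the next sequence number, as the CLI loop does

for index, wif, pub in derive_keys("some long brainkey phrase", limit=3):
    print(index, wif, pub)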
d335fce6cea07df872d8cd7d70c6c3fea348e521
|
tests/__init__.py
|
tests/__init__.py
|
import os.path
import unittest
def get_tests():
start_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(start_dir, pattern="*.py")
|
import os.path
import unittest
def get_tests():
return full_suite()
def full_suite():
from .resource import ResourceTestCase
from .serializer import ResourceTestCase as SerializerTestCase
from .utils import UtilsTestCase
resourcesuite = unittest.TestLoader().loadTestsFromTestCase(ResourceTestCase)
serializersuite = unittest.TestLoader().loadTestsFromTestCase(SerializerTestCase)
utilssuite = unittest.TestLoader().loadTestsFromTestCase(UtilsTestCase)
return unittest.TestSuite([resourcesuite, serializersuite, utilssuite])
|
Update get_tests to be backwards compatible with Python 2.6; since the library is compatible, it seems worth the extra effort to test against it.
|
Update get_tests to be backwards compatible with Python 2.6; since the library is compatible, it seems worth the extra effort to test against it.
|
Python
|
bsd-2-clause
|
jannon/slumber,IAlwaysBeCoding/More,zongxiao/slumber,infoxchange/slumber,futurice/slumber,IAlwaysBeCoding/slumber,samgiles/slumber,s-block/slumber,ministryofjustice/slumber
|
import os.path
import unittest
def get_tests():
start_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(start_dir, pattern="*.py")
Update get_tests to be backwards compatible with Python 2.6; since the library is compatible, it seems worth the extra effort to test against it.
|
import os.path
import unittest
def get_tests():
return full_suite()
def full_suite():
from .resource import ResourceTestCase
from .serializer import ResourceTestCase as SerializerTestCase
from .utils import UtilsTestCase
resourcesuite = unittest.TestLoader().loadTestsFromTestCase(ResourceTestCase)
serializersuite = unittest.TestLoader().loadTestsFromTestCase(SerializerTestCase)
utilssuite = unittest.TestLoader().loadTestsFromTestCase(UtilsTestCase)
return unittest.TestSuite([resourcesuite, serializersuite, utilssuite])
|
<commit_before>import os.path
import unittest
def get_tests():
start_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(start_dir, pattern="*.py")
<commit_msg>Update get_tests to be backwards compatible with Python 2.6; since the library is compatible, it seems worth the extra effort to test against it.<commit_after>
|
import os.path
import unittest
def get_tests():
return full_suite()
def full_suite():
from .resource import ResourceTestCase
from .serializer import ResourceTestCase as SerializerTestCase
from .utils import UtilsTestCase
resourcesuite = unittest.TestLoader().loadTestsFromTestCase(ResourceTestCase)
serializersuite = unittest.TestLoader().loadTestsFromTestCase(SerializerTestCase)
utilssuite = unittest.TestLoader().loadTestsFromTestCase(UtilsTestCase)
return unittest.TestSuite([resourcesuite, serializersuite, utilssuite])
|
import os.path
import unittest
def get_tests():
start_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(start_dir, pattern="*.py")
Update get_tests to be backwards compatible with Python 2.6; since the library is compatible, it seems worth the extra effort to test against it.
import os.path
import unittest
def get_tests():
return full_suite()
def full_suite():
from .resource import ResourceTestCase
from .serializer import ResourceTestCase as SerializerTestCase
from .utils import UtilsTestCase
resourcesuite = unittest.TestLoader().loadTestsFromTestCase(ResourceTestCase)
serializersuite = unittest.TestLoader().loadTestsFromTestCase(SerializerTestCase)
utilssuite = unittest.TestLoader().loadTestsFromTestCase(UtilsTestCase)
return unittest.TestSuite([resourcesuite, serializersuite, utilssuite])
|
<commit_before>import os.path
import unittest
def get_tests():
start_dir = os.path.dirname(__file__)
return unittest.TestLoader().discover(start_dir, pattern="*.py")
<commit_msg>Update get_tests to be backwards compatible with Python 2.6; since the library is compatible, it seems worth the extra effort to test against it.<commit_after>import os.path
import unittest
def get_tests():
return full_suite()
def full_suite():
from .resource import ResourceTestCase
from .serializer import ResourceTestCase as SerializerTestCase
from .utils import UtilsTestCase
resourcesuite = unittest.TestLoader().loadTestsFromTestCase(ResourceTestCase)
serializersuite = unittest.TestLoader().loadTestsFromTestCase(SerializerTestCase)
utilssuite = unittest.TestLoader().loadTestsFromTestCase(UtilsTestCase)
return unittest.TestSuite([resourcesuite, serializersuite, utilssuite])
|
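The motivation for this change is that unittest.TestLoader().discover() only exists from Python 2.7 onward, so building the suite explicitly keeps the tests runnable on 2.6. A minimal sketch of how the new full_suite() might be driven, using only the standard library; the `tests` package path is an assumption taken from the file location above.

import unittest
from tests import full_suite

# TextTestRunner and TestResult.wasSuccessful() exist on Python 2.6,
# unlike TestLoader.discover(), which was added in Python 2.7.
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(full_suite())
raise SystemExit(0 if result.wasSuccessful() else 1)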
365206a49adf81b8618206912001ff744b54f405
|
tests/conftest.py
|
tests/conftest.py
|
# coding: utf-8
import os
import pytest
from dynaconf import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
|
# coding: utf-8
import os
import pytest
from dynaconf.base import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
|
Make it clear where we are getting LazySettings from
|
Make it clear where we are getting LazySettings from
|
Python
|
mit
|
rochacbruno/dynaconf,rochacbruno/dynaconf,rochacbruno/dynaconf
|
# coding: utf-8
import os
import pytest
from dynaconf import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
Make it clear where we are getting LazySettings from
|
# coding: utf-8
import os
import pytest
from dynaconf.base import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
|
<commit_before># coding: utf-8
import os
import pytest
from dynaconf import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
<commit_msg>Make it clear where we are getting LazySettings from<commit_after>
|
# coding: utf-8
import os
import pytest
from dynaconf.base import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
|
# coding: utf-8
import os
import pytest
from dynaconf import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
Make it clear where we are getting LazySettings from
# coding: utf-8
import os
import pytest
from dynaconf.base import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
|
<commit_before># coding: utf-8
import os
import pytest
from dynaconf import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
<commit_msg>Make it clear where we are getting LazySettings from<commit_after># coding: utf-8
import os
import pytest
from dynaconf.base import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=[
'dynaconf.loaders.env_loader',
'dynaconf.loaders.redis_loader'
],
DYNACONF_NAMESPACE="DYNA%s" % mode
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
|
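For illustration, a hypothetical pytest test built on the fixture above, showing that dynaconf's env loader casts the "@int", "@float", "@json", and "@bool" prefixes into typed values. Attribute names follow the environment variables the fixture sets under the DYNA<mode> namespace, and exact casting behavior may differ across dynaconf versions.

def test_casted_values(settings):
    # Values set as "@int 5000", "@float 42.1", etc. in the fixture come
    # back typed, not as raw environment strings.
    assert settings.HOSTNAME == 'host.com'
    assert settings.PORT == 5000
    assert settings.VALUE == 42.1
    assert settings.ALIST == ["item1", "item2", "item3"]
    assert settings.ADICT == {"key": "value"}
    assert settings.DEBUG is True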