commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
e6210d4d3fcdff4c9b4b22946e03062e01efd830
|
pika/adapters/__init__.py
|
pika/adapters/__init__.py
|
from asyncore_connection import AsyncoreConnection
from blocking_connection import BlockingConnection
from tornado_connection import TornadoConnection
|
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and
# limitations under the License.
#
# The Original Code is Pika.
#
# The Initial Developers of the Original Code are LShift Ltd, Cohesive
# Financial Technologies LLC, and Rabbit Technologies Ltd. Portions
# created before 22-Nov-2008 00:00:00 GMT by LShift Ltd, Cohesive
# Financial Technologies LLC, or Rabbit Technologies Ltd are Copyright
# (C) 2007-2008 LShift Ltd, Cohesive Financial Technologies LLC, and
# Rabbit Technologies Ltd.
#
# Portions created by LShift Ltd are Copyright (C) 2007-2009 LShift
# Ltd. Portions created by Cohesive Financial Technologies LLC are
# Copyright (C) 2007-2009 Cohesive Financial Technologies
# LLC. Portions created by Rabbit Technologies Ltd are Copyright (C)
# 2007-2009 Rabbit Technologies Ltd.
#
# Portions created by Tony Garnock-Jones are Copyright (C) 2009-2010
# LShift Ltd and Tony Garnock-Jones.
#
# All Rights Reserved.
#
# Contributor(s): ______________________________________.
#
# Alternatively, the contents of this file may be used under the terms
# of the GNU General Public License Version 2 or later (the "GPL"), in
# which case the provisions of the GPL are applicable instead of those
# above. If you wish to allow use of your version of this file only
# under the terms of the GPL, and not to allow others to use your
# version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the
# notice and other provisions required by the GPL. If you do not
# delete the provisions above, a recipient may use your version of
# this file under the terms of any one of the MPL or the GPL.
#
# ***** END LICENSE BLOCK *****
from base_connection import BaseConnection
from asyncore_connection import AsyncoreConnection
from blocking_connection import BlockingConnection
from tornado_connection import TornadoConnection
|
Add the license block and BaseConnection
|
Add the license block and BaseConnection
|
Python
|
bsd-3-clause
|
skftn/pika,shinji-s/pika,Zephor5/pika,zixiliuyue/pika,reddec/pika,pika/pika,renshawbay/pika-python3,vrtsystems/pika,knowsis/pika,fkarb/pika-python3,jstnlef/pika,Tarsbot/pika,vitaly-krugl/pika,hugoxia/pika,benjamin9999/pika
|
---
+++
@@ -1,3 +1,53 @@
+# ***** BEGIN LICENSE BLOCK *****
+# Version: MPL 1.1/GPL 2.0
+#
+# The contents of this file are subject to the Mozilla Public License
+# Version 1.1 (the "License"); you may not use this file except in
+# compliance with the License. You may obtain a copy of the License at
+# http://www.mozilla.org/MPL/
+#
+# Software distributed under the License is distributed on an "AS IS"
+# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+# the License for the specific language governing rights and
+# limitations under the License.
+#
+# The Original Code is Pika.
+#
+# The Initial Developers of the Original Code are LShift Ltd, Cohesive
+# Financial Technologies LLC, and Rabbit Technologies Ltd. Portions
+# created before 22-Nov-2008 00:00:00 GMT by LShift Ltd, Cohesive
+# Financial Technologies LLC, or Rabbit Technologies Ltd are Copyright
+# (C) 2007-2008 LShift Ltd, Cohesive Financial Technologies LLC, and
+# Rabbit Technologies Ltd.
+#
+# Portions created by LShift Ltd are Copyright (C) 2007-2009 LShift
+# Ltd. Portions created by Cohesive Financial Technologies LLC are
+# Copyright (C) 2007-2009 Cohesive Financial Technologies
+# LLC. Portions created by Rabbit Technologies Ltd are Copyright (C)
+# 2007-2009 Rabbit Technologies Ltd.
+#
+# Portions created by Tony Garnock-Jones are Copyright (C) 2009-2010
+# LShift Ltd and Tony Garnock-Jones.
+#
+# All Rights Reserved.
+#
+# Contributor(s): ______________________________________.
+#
+# Alternatively, the contents of this file may be used under the terms
+# of the GNU General Public License Version 2 or later (the "GPL"), in
+# which case the provisions of the GPL are applicable instead of those
+# above. If you wish to allow use of your version of this file only
+# under the terms of the GPL, and not to allow others to use your
+# version of this file under the terms of the MPL, indicate your
+# decision by deleting the provisions above and replace them with the
+# notice and other provisions required by the GPL. If you do not
+# delete the provisions above, a recipient may use your version of
+# this file under the terms of any one of the MPL or the GPL.
+#
+# ***** END LICENSE BLOCK *****
+
+
+from base_connection import BaseConnection
from asyncore_connection import AsyncoreConnection
from blocking_connection import BlockingConnection
from tornado_connection import TornadoConnection
|
f7b1d233ed39eed24e3c1489738df01f700112e3
|
tensorflow/contrib/tensorrt/__init__.py
|
tensorflow/contrib/tensorrt/__init__.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Exposes the python wrapper for TensorRT graph transforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import
try:
from tensorflow.contrib.tensorrt.python import * # pylint: disable=import-not-at-top
except Exception as e:
no_trt_message = (
'**** Failed to initialize TensorRT. This is either because the TensorRT'
' installation path is not in LD_LIBRARY_PATH, or because you do not have it'
' installed. If not installed, please go to'
' https://developer.nvidia.com/tensorrt to download and install'
' TensorRT ****')
print(no_trt_message)
raise e
# pylint: enable=unused-import,wildcard-import
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Exposes the python wrapper for TensorRT graph transforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import,g-import-not-at-top
try:
from tensorflow.contrib.tensorrt.python import *
except Exception as e:
no_trt_message = (
'**** Failed to initialize TensorRT. This is either because the TensorRT'
' installation path is not in LD_LIBRARY_PATH, or because you do not have'
' it installed. If not installed, please go to'
' https://developer.nvidia.com/tensorrt to download and install'
' TensorRT ****')
print(no_trt_message)
raise e
# pylint: enable=unused-import,wildcard-import,g-import-not-at-top
|
Move the pylint message and fix comment length
|
Move the pylint message and fix comment length
|
Python
|
apache-2.0
|
paolodedios/tensorflow,lukeiwanski/tensorflow,alshedivat/tensorflow,kobejean/tensorflow,frreiss/tensorflow-fred,Xeralux/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alsrgv/tensorflow,nburn42/tensorflow,meteorcloudy/tensorflow,Xeralux/tensorflow,paolodedios/tensorflow,ppwwyyxx/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,gautam1858/tensorflow,girving/tensorflow,yanchen036/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,petewarden/tensorflow,DavidNorman/tensorflow,seanli9jan/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,apark263/tensorflow,gojira/tensorflow,xodus7/tensorflow,jalexvig/tensorflow,allenlavoie/tensorflow,apark263/tensorflow,Intel-Corporation/tensorflow,hehongliang/tensorflow,nburn42/tensorflow,jart/tensorflow,ZhangXinNan/tensorflow,theflofly/tensorflow,caisq/tensorflow,xzturn/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,alsrgv/tensorflow,AnishShah/tensorflow,brchiu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,freedomtan/tensorflow,yongtang/tensorflow,arborh/tensorflow,alshedivat/tensorflow,freedomtan/tensorflow,alsrgv/tensorflow,dongjoon-hyun/tensorflow,gojira/tensorflow,arborh/tensorflow,adit-chandra/tensorflow,ppwwyyxx/tensorflow,nburn42/tensorflow,girving/tensorflow,Intel-tensorflow/tensorflow,gojira/tensorflow,girving/tensorflow,AnishShah/tensorflow,manipopopo/tensorflow,snnn/tensorflow,xzturn/tensorflow,alshedivat/tensorflow,gautam1858/tensorflow,jendap/tensorflow,jendap/tensorflow,snnn/tensorflow,gunan/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gunan/tensorflow,jalexvig/tensorflow,dancingdan/tensorflow,lukeiwanski/tensorflow,xzturn/tensorflow,gunan/tensorflow,lukeiwanski/tensorflow,jalexvig/tensorflow,dendisuhubdy/tensorflow,alshedivat/tensorflow,kobejean/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gunan/tensorflow,tensorflow/tensorf
low-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,eaplatanios/tensorflow,dancingdan/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,kevin-coder/tensorflow-fork,ppwwyyxx/tensorflow,aam-at/tensorflow,aselle/tensorflow,yongtang/tensorflow,aam-at/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,ppwwyyxx/tensorflow,frreiss/tensorflow-fred,arborh/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,jbedorf/tensorflow,nburn42/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,hehongliang/tensorflow,dendisuhubdy/tensorflow,xzturn/tensorflow,jhseu/tensorflow,jendap/tensorflow,xzturn/tensorflow,girving/tensorflow,aam-at/tensorflow,karllessard/tensorflow,ageron/tensorflow,apark263/tensorflow,frreiss/tensorflow-fred,jalexvig/tensorflow,AnishShah/tensorflow,alsrgv/tensorflow,chemelnucfin/tensorflow,chemelnucfin/tensorflow,theflofly/tensorflow,kevin-coder/tensorflow-fork,gojira/tensorflow,Xeralux/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Bismarrck/tensorflow,jbedorf/tensorflow,allenlavoie/tensorflow,girving/tensorflow,gautam1858/tensorflow,snnn/tensorflow,kevin-coder/tensorflow-fork,manipopopo/tensorflow,caisq/tensorflow,jart/tensorflow,aam-at/tensorflow,eaplatanios/tensorflow,petewarden/tensorflow,arborh/tensorflow,jart/tensorflow,snnn/tensorflow,caisq/tensorflow,annarev/tensorflow,ageron/tensorflow,xodus7/tensorflow,asimshankar/tensorflow,ghchinoy/tensorflow,yongtang/tensorflow,yanchen036/tensorflow,xzturn/tensorflow,chemelnucfin/tensorflow,yongtang/tensorflow,aselle/tensorflow,alsrgv/tensorflow,yanchen036/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,asimshankar/tensorflow,ZhangXinNan/tensorflow,annarev/tensorflow,gojira/tensorflow,paolodedios/tensorflow,gunan/tensorflow,aam-at/tensorflow,theflofly/tensorflow,drpngx/tensorflow,arborh/tensorflow,snnn/tensorflow,jart/tensorflow,xod
us7/tensorflow,seanli9jan/tensorflow,jbedorf/tensorflow,dendisuhubdy/tensorflow,jendap/tensorflow,adit-chandra/tensorflow,apark263/tensorflow,seanli9jan/tensorflow,alshedivat/tensorflow,kevin-coder/tensorflow-fork,nburn42/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,lukeiwanski/tensorflow,hfp/tensorflow-xsmm,gunan/tensorflow,eaplatanios/tensorflow,dendisuhubdy/tensorflow,ageron/tensorflow,allenlavoie/tensorflow,dongjoon-hyun/tensorflow,xzturn/tensorflow,aldian/tensorflow,karllessard/tensorflow,benoitsteiner/tensorflow-xsmm,hfp/tensorflow-xsmm,yongtang/tensorflow,aam-at/tensorflow,karllessard/tensorflow,benoitsteiner/tensorflow-xsmm,chemelnucfin/tensorflow,xodus7/tensorflow,caisq/tensorflow,arborh/tensorflow,arborh/tensorflow,dendisuhubdy/tensorflow,ageron/tensorflow,jhseu/tensorflow,allenlavoie/tensorflow,xodus7/tensorflow,kobejean/tensorflow,aselle/tensorflow,ppwwyyxx/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,AnishShah/tensorflow,dendisuhubdy/tensorflow,paolodedios/tensorflow,AnishShah/tensorflow,adit-chandra/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,meteorcloudy/tensorflow,apark263/tensorflow,kevin-coder/tensorflow-fork,chemelnucfin/tensorflow,manipopopo/tensorflow,kevin-coder/tensorflow-fork,girving/tensorflow,drpngx/tensorflow,sarvex/tensorflow,caisq/tensorflow,jhseu/tensorflow,kobejean/tensorflow,gunan/tensorflow,asimshankar/tensorflow,Xeralux/tensorflow,asimshankar/tensorflow,davidzchen/tensorflow,ZhangXinNan/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jhseu/tensorflow,ageron/tensorflow,ghchinoy/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,cxxgtxy/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,jendap/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,girving/tensorflow,manipopopo/tensorflow,kobejean/tensorflow,ZhangXinNan/tensorflow,brchiu/tensorflow,hfp/tensorflow-xsmm,ppwwyyxx/tensorflow,an
narev/tensorflow,xodus7/tensorflow,ageron/tensorflow,aselle/tensorflow,caisq/tensorflow,dendisuhubdy/tensorflow,DavidNorman/tensorflow,eaplatanios/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,renyi533/tensorflow,jalexvig/tensorflow,dongjoon-hyun/tensorflow,dancingdan/tensorflow,kobejean/tensorflow,aldian/tensorflow,renyi533/tensorflow,nburn42/tensorflow,davidzchen/tensorflow,annarev/tensorflow,ghchinoy/tensorflow,allenlavoie/tensorflow,alsrgv/tensorflow,petewarden/tensorflow,eaplatanios/tensorflow,gunan/tensorflow,Xeralux/tensorflow,tensorflow/tensorflow,apark263/tensorflow,jbedorf/tensorflow,brchiu/tensorflow,yanchen036/tensorflow,snnn/tensorflow,nburn42/tensorflow,benoitsteiner/tensorflow-xsmm,alsrgv/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,freedomtan/tensorflow,yanchen036/tensorflow,aselle/tensorflow,yongtang/tensorflow,AnishShah/tensorflow,meteorcloudy/tensorflow,drpngx/tensorflow,jhseu/tensorflow,sarvex/tensorflow,Intel-tensorflow/tensorflow,manipopopo/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,jendap/tensorflow,davidzchen/tensorflow,meteorcloudy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,xodus7/tensorflow,hehongliang/tensorflow,meteorcloudy/tensorflow,girving/tensorflow,nburn42/tensorflow,hfp/tensorflow-xsmm,alsrgv/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,chemelnucfin/tensorflow,DavidNorman/tensorflow,jart/tensorflow,DavidNorman/tensorflow,kevin-coder/tensorflow-fork,xzturn/tensorflow,manipopopo/tensorflow,kobejean/tensorflow,benoitsteiner/tensorflow-xsmm,allenlavoie/tensorflow,theflofly/tensorflow,adit-chandra/tensorflow,drpngx/tensorflow,drpngx/tensorflow,chemelnucfin/tensorflow,aam-at/tensorflow,ageron/tensorflow,dongjoon-hyun/tensorflow,gautam1858/tensorflow,AnishShah/tensorflow,karllessard/tensorflow,xodus7/tensorflow,jbedorf/tensorflow,benoitsteiner/tensorflow-xsmm,seanli9jan/tensorflow,dongjoon-hyun/tensorflow,aldian/tensorflow,alsrgv/tensorflow,xodus7/tensorflow,asimshankar/te
nsorflow,davidzchen/tensorflow,dancingdan/tensorflow,hfp/tensorflow-xsmm,aam-at/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,Xeralux/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,DavidNorman/tensorflow,annarev/tensorflow,hehongliang/tensorflow,aselle/tensorflow,brchiu/tensorflow,lukeiwanski/tensorflow,eaplatanios/tensorflow,meteorcloudy/tensorflow,Xeralux/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,Bismarrck/tensorflow,brchiu/tensorflow,AnishShah/tensorflow,hfp/tensorflow-xsmm,gojira/tensorflow,aselle/tensorflow,jbedorf/tensorflow,benoitsteiner/tensorflow-xsmm,davidzchen/tensorflow,aselle/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Bismarrck/tensorflow,AnishShah/tensorflow,renyi533/tensorflow,snnn/tensorflow,aselle/tensorflow,dancingdan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jalexvig/tensorflow,tensorflow/tensorflow,snnn/tensorflow,jart/tensorflow,tensorflow/tensorflow,benoitsteiner/tensorflow-xsmm,gojira/tensorflow,ppwwyyxx/tensorflow,dancingdan/tensorflow,snnn/tensorflow,adit-chandra/tensorflow,hehongliang/tensorflow,Intel-Corporation/tensorflow,asimshankar/tensorflow,manipopopo/tensorflow,xzturn/tensorflow,aldian/tensorflow,arborh/tensorflow,caisq/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,ZhangXinNan/tensorflow,lukeiwanski/tensorflow,aldian/tensorflow,DavidNorman/tensorflow,ghchinoy/tensorflow,chemelnucfin/tensorflow,davidzchen/tensorflow,annarev/tensorflow,ageron/tensorflow,Intel-Corporation/tensorflow,jbedorf/tensorflow,nburn42/tensorflow,arborh/tensorflow,davidzchen/tensorflow,cxxgtxy/tensorflow,Intel-tensorflow/tensorflow,alshedivat/tensorflow,jhseu/tensorflow,annarev/tensorflow,allenlavoie/tensorflow,aam-at/tensorflow,jbedorf/tensorflow,brchiu/tensorflow,asimshankar/tensorflow,kobejean/tensorflow,kevin-coder/tensorflow-fork,meteorcloudy/tensorflow,xzturn/tensorflow,tensorflow/tensorflow,jhseu/tensorflow,jalexvig/te
nsorflow,hfp/tensorflow-xsmm,eaplatanios/tensorflow,ghchinoy/tensorflow,dancingdan/tensorflow,manipopopo/tensorflow,renyi533/tensorflow,allenlavoie/tensorflow,DavidNorman/tensorflow,frreiss/tensorflow-fred,freedomtan/tensorflow,girving/tensorflow,seanli9jan/tensorflow,kevin-coder/tensorflow-fork,chemelnucfin/tensorflow,gunan/tensorflow,ghchinoy/tensorflow,cxxgtxy/tensorflow,asimshankar/tensorflow,allenlavoie/tensorflow,tensorflow/tensorflow,caisq/tensorflow,adit-chandra/tensorflow,yongtang/tensorflow,ZhangXinNan/tensorflow,theflofly/tensorflow,dancingdan/tensorflow,gautam1858/tensorflow,manipopopo/tensorflow,meteorcloudy/tensorflow,davidzchen/tensorflow,gunan/tensorflow,freedomtan/tensorflow,hfp/tensorflow-xsmm,ZhangXinNan/tensorflow,tensorflow/tensorflow,dongjoon-hyun/tensorflow,jendap/tensorflow,cxxgtxy/tensorflow,eaplatanios/tensorflow,DavidNorman/tensorflow,girving/tensorflow,Xeralux/tensorflow,ageron/tensorflow,theflofly/tensorflow,karllessard/tensorflow,benoitsteiner/tensorflow-xsmm,alshedivat/tensorflow,seanli9jan/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow,caisq/tensorflow,Intel-tensorflow/tensorflow,alshedivat/tensorflow,alsrgv/tensorflow,yanchen036/tensorflow,theflofly/tensorflow,aselle/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,DavidNorman/tensorflow,Intel-tensorflow/tensorflow,dongjoon-hyun/tensorflow,ghchinoy/tensorflow,seanli9jan/tensorflow,ppwwyyxx/tensorflow,gunan/tensorflow,allenlavoie/tensorflow,jart/tensorflow,freedomtan/tensorflow,lukeiwanski/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,xodus7/tensorflow,renyi533/tensorflow,ghchinoy/tensorflow,aam-at/tensorflow,ageron/tensorflow,dongjoon-hyun/tensorflow,drpngx/tensorflow,cxxgtxy/tensorflow,hehongliang/tensorflow,Xeralux/tensorflow,hfp/tensorflow-xsmm,gojira/tensorflow,petewarden/tensorflow,ghchinoy/tensorflow,brchiu/tensorflow,brchiu/tensorflow,frreiss/tensorflow-fred,petewarden/tensorf
low,Bismarrck/tensorflow,caisq/tensorflow,sarvex/tensorflow,jhseu/tensorflow,dendisuhubdy/tensorflow,karllessard/tensorflow,ZhangXinNan/tensorflow,Bismarrck/tensorflow,alsrgv/tensorflow,snnn/tensorflow,jhseu/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,jart/tensorflow,benoitsteiner/tensorflow-xsmm,Intel-tensorflow/tensorflow,annarev/tensorflow,paolodedios/tensorflow,jalexvig/tensorflow,sarvex/tensorflow,xzturn/tensorflow,jalexvig/tensorflow,kevin-coder/tensorflow-fork,frreiss/tensorflow-fred,nburn42/tensorflow,gojira/tensorflow,gojira/tensorflow,alshedivat/tensorflow,davidzchen/tensorflow,girving/tensorflow,AnishShah/tensorflow,drpngx/tensorflow,freedomtan/tensorflow,ZhangXinNan/tensorflow,kobejean/tensorflow,chemelnucfin/tensorflow,apark263/tensorflow,annarev/tensorflow,xodus7/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ageron/tensorflow,lukeiwanski/tensorflow,asimshankar/tensorflow,chemelnucfin/tensorflow,renyi533/tensorflow,dancingdan/tensorflow,allenlavoie/tensorflow,jart/tensorflow,arborh/tensorflow,aam-at/tensorflow,yanchen036/tensorflow,drpngx/tensorflow,sarvex/tensorflow,apark263/tensorflow,petewarden/tensorflow,seanli9jan/tensorflow,adit-chandra/tensorflow,davidzchen/tensorflow,eaplatanios/tensorflow,benoitsteiner/tensorflow-xsmm,Xeralux/tensorflow,petewarden/tensorflow,kobejean/tensorflow,tensorflow/tensorflow-pywrap_saved_model,renyi533/tensorflow,yanchen036/tensorflow,apark263/tensorflow,Intel-tensorflow/tensorflow,AnishShah/tensorflow,paolodedios/tensorflow,xzturn/tensorflow,alshedivat/tensorflow,yongtang/tensorflow,jbedorf/tensorflow,aam-at/tensorflow,renyi533/tensorflow,jalexvig/tensorflow,adit-chandra/tensorflow,dongjoon-hyun/tensorflow,hehongliang/tensorflow,nburn42/tensorflow,jhseu/tensorflow,Bismarrck/tensorflow,apark263/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Bismarrck/tensorflow,ZhangXinNan/tensorflow,dancingdan/ten
sorflow,manipopopo/tensorflow,theflofly/tensorflow,gojira/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Bismarrck/tensorflow,apark263/tensorflow,adit-chandra/tensorflow,petewarden/tensorflow,seanli9jan/tensorflow,alsrgv/tensorflow,brchiu/tensorflow,karllessard/tensorflow,renyi533/tensorflow,gautam1858/tensorflow,jbedorf/tensorflow,jart/tensorflow,ppwwyyxx/tensorflow,hfp/tensorflow-xsmm,theflofly/tensorflow,petewarden/tensorflow,drpngx/tensorflow,annarev/tensorflow,petewarden/tensorflow,gunan/tensorflow,ppwwyyxx/tensorflow,hfp/tensorflow-xsmm,arborh/tensorflow,ppwwyyxx/tensorflow,dongjoon-hyun/tensorflow,jendap/tensorflow,freedomtan/tensorflow,drpngx/tensorflow,davidzchen/tensorflow,meteorcloudy/tensorflow,kevin-coder/tensorflow-fork,aldian/tensorflow,arborh/tensorflow,Bismarrck/tensorflow,kobejean/tensorflow,renyi533/tensorflow,benoitsteiner/tensorflow-xsmm,eaplatanios/tensorflow,karllessard/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhseu/tensorflow,jendap/tensorflow,karllessard/tensorflow,Bismarrck/tensorflow,sarvex/tensorflow,aldian/tensorflow,asimshankar/tensorflow,snnn/tensorflow,jalexvig/tensorflow,sarvex/tensorflow,ZhangXinNan/tensorflow,eaplatanios/tensorflow,dancingdan/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,ghchinoy/tensorflow,jbedorf/tensorflow,cxxgtxy/tensorflow,jendap/tensorflow,asimshankar/tensorflow,theflofly/tensorflow,seanli9jan/tensorflow,meteorcloudy/tensorflow,manipopopo/tensorflow,aldian/tensorflow,lukeiwanski/tensorflow,jendap/tensorflow,dendisuhubdy/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,theflofly/tensorflow,jbedorf/tensorflow,dongjoon-hyun/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,freedomtan/tensorflow,lukeiwanski/tensorflow,tensorflow/tensorflow,brchiu/tensorflow,petewarden/tensorflow,ppwwyyxx/tensorflow,paolodedios/tensorflow,alshedivat/tensorflow,ageron/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_
once,aselle/tensorflow,brchiu/tensorflow,adit-chandra/tensorflow,jhseu/tensorflow,annarev/tensorflow,Xeralux/tensorflow,frreiss/tensorflow-fred,chemelnucfin/tensorflow,Intel-Corporation/tensorflow,dendisuhubdy/tensorflow
|
---
+++
@@ -18,16 +18,16 @@
from __future__ import division
from __future__ import print_function
-# pylint: disable=unused-import,wildcard-import
+# pylint: disable=unused-import,wildcard-import,g-import-not-at-top
try:
- from tensorflow.contrib.tensorrt.python import * # pylint: disable=import-not-at-top
+ from tensorflow.contrib.tensorrt.python import *
except Exception as e:
no_trt_message = (
'**** Failed to initialize TensorRT. This is either because the TensorRT'
- ' installation path is not in LD_LIBRARY_PATH, or because you do not have it'
- ' installed. If not installed, please go to'
+ ' installation path is not in LD_LIBRARY_PATH, or because you do not have'
+ ' it installed. If not installed, please go to'
' https://developer.nvidia.com/tensorrt to download and install'
' TensorRT ****')
print(no_trt_message)
raise e
-# pylint: enable=unused-import,wildcard-import
+# pylint: enable=unused-import,wildcard-import,g-import-not-at-top
|
dd21586d910dded2932f96b98d6d0588c18d2f58
|
great_expectations/cli/cli_logging.py
|
great_expectations/cli/cli_logging.py
|
import logging
import warnings
warnings.filterwarnings("ignore")
###
# REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND.
# PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR
###
logger = logging.getLogger("great_expectations.cli")
def _set_up_logger():
# Log to console with a simple formatter; used by CLI
formatter = logging.Formatter("%(message)s")
handler = logging.StreamHandler()
handler.setLevel(level=logging.WARNING)
handler.setFormatter(formatter)
module_logger = logging.getLogger("great_expectations")
module_logger.addHandler(handler)
return module_logger
|
import logging
import warnings
warnings.filterwarnings("ignore")
###
# REVIEWER NOTE: THE ORIGINAL IMPLEMENTATION WAS HEAVY HANDED AND I BELIEVE WAS A TEMPORARY WORKAROUND.
# PLEASE CAREFULLY REVIEW TO ENSURE REMOVING THIS DOES NOT AFFECT DESIRED BEHAVIOR
###
logger = logging.getLogger("great_expectations.cli")
def _set_up_logger():
# Log to console with a simple formatter; used by CLI
formatter = logging.Formatter("%(message)s")
handler = logging.StreamHandler()
handler.setFormatter(formatter)
module_logger = logging.getLogger("great_expectations")
module_logger.addHandler(handler)
module_logger.setLevel(level=logging.WARNING)
return module_logger
|
Set level on module logger instead
|
Set level on module logger instead
|
Python
|
apache-2.0
|
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
|
---
+++
@@ -15,9 +15,10 @@
# Log to console with a simple formatter; used by CLI
formatter = logging.Formatter("%(message)s")
handler = logging.StreamHandler()
- handler.setLevel(level=logging.WARNING)
+
handler.setFormatter(formatter)
module_logger = logging.getLogger("great_expectations")
module_logger.addHandler(handler)
+ module_logger.setLevel(level=logging.WARNING)
return module_logger
|
25f26842b8371b13b3fc9f4abf12dfba0b0408bc
|
shapely/tests/__init__.py
|
shapely/tests/__init__.py
|
# package
from test_doctests import test_suite
|
from unittest import TestSuite
import test_doctests, test_prepared
def test_suite():
suite = TestSuite()
suite.addTest(test_doctests.test_suite())
suite.addTest(test_prepared.test_suite())
return suite
|
Integrate tests of prepared geoms into main test suite.
|
Integrate tests of prepared geoms into main test suite.
git-svn-id: 1a8067f95329a7fca9bad502d13a880b95ac544b@1508 b426a367-1105-0410-b9ff-cdf4ab011145
|
Python
|
bsd-3-clause
|
mindw/shapely,mindw/shapely,mouadino/Shapely,mouadino/Shapely,abali96/Shapely,jdmcbr/Shapely,abali96/Shapely,jdmcbr/Shapely
|
---
+++
@@ -1,2 +1,10 @@
-# package
-from test_doctests import test_suite
+from unittest import TestSuite
+
+import test_doctests, test_prepared
+
+def test_suite():
+ suite = TestSuite()
+ suite.addTest(test_doctests.test_suite())
+ suite.addTest(test_prepared.test_suite())
+ return suite
+
|
4fc803a61b7c6322b079554bfec52b34b130b810
|
config/urls.py
|
config/urls.py
|
"""wuppdays URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
import yunity.api.urls
import yunity.doc.flask_swagger
urlpatterns = [
url(r'^api/', include(yunity.api.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^doc$', yunity.doc.flask_swagger.doc),
]
|
"""yunity URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
import yunity.api.urls
import yunity.doc.yunity_swagger
urlpatterns = [
url(r'^api/', include(yunity.api.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^doc$', yunity.doc.yunity_swagger.doc),
]
|
Fix config to use yunity_swagger
|
Fix config to use yunity_swagger
|
Python
|
agpl-3.0
|
yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core
|
---
+++
@@ -1,4 +1,4 @@
-"""wuppdays URL Configuration
+"""yunity URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
@@ -18,10 +18,10 @@
from django.conf import settings
import yunity.api.urls
-import yunity.doc.flask_swagger
+import yunity.doc.yunity_swagger
urlpatterns = [
url(r'^api/', include(yunity.api.urls)),
url(r'^admin/', include(admin.site.urls)),
- url(r'^doc$', yunity.doc.flask_swagger.doc),
+ url(r'^doc$', yunity.doc.yunity_swagger.doc),
]
|
356cb63327ce578d31a0c0ee5201423a8ed0e9d2
|
wensleydale/cli.py
|
wensleydale/cli.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import json
import click
from wensleydale import parser
@click.command()
@click.argument('path', type=str)
@click.argument('query', type=str)
@click.version_option()
def main(path, query, level=None, version=None):
'''
Mr Wensleydale. Query the AST using ObjectPath and return JSON.
'''
# Run the query.
result = parser.run(path, query)
# Spit out the result.
print(json.dumps(result))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import json
import click
from wensleydale import parser
@click.command()
@click.argument('path', type=click.Path(exists=True))
@click.argument('query', type=str)
@click.version_option()
def main(path, query, level=None, version=None):
'''
Mr Wensleydale. Query the AST using ObjectPath and return JSON.
'''
# Run the query.
result = parser.run(path, query)
# Spit out the result.
print(json.dumps(result))
|
Add file path existince checking
|
Add file path existince checking
|
Python
|
mit
|
RishiRamraj/wensleydale
|
---
+++
@@ -8,7 +8,7 @@
@click.command()
-@click.argument('path', type=str)
+@click.argument('path', type=click.Path(exists=True))
@click.argument('query', type=str)
@click.version_option()
def main(path, query, level=None, version=None):
|
7eff20d706eb35513d8d1f420e59879e80400417
|
pseudon/ast_translator.py
|
pseudon/ast_translator.py
|
from ast import AST
import yaml
class ASTTranslator:
def __init__(self, tree):
self.tree = tree
def translate(self):
return yaml.dump({'type': 'program', 'code': []})
|
import ast
import yaml
class ASTTranslator:
def __init__(self, tree):
self.tree = tree
def translate(self):
return yaml.dump(self._translate_node(self.tree))
def _translate_node(self, node):
if isinstance(node, ast.AST):
return getattr('_translate_%s' % type(node).__name__)(**node.__dict__)
elif isinstance(node, list):
return [self._translate_node(n) for n in node]
elif isinstance(node, dict):
return {k: self._translate_node(v) for k, v in node.items()}
else:
return node
def _translate_module(self, body):
return {'type': 'program', 'code': self._translate_node(body)}
def _translate_int(self, n):
return {'type': 'int', 'value': n}
|
Add a basic ast translator
|
Add a basic ast translator
|
Python
|
mit
|
alehander42/pseudo-python
|
---
+++
@@ -1,4 +1,4 @@
-from ast import AST
+import ast
import yaml
@@ -8,4 +8,20 @@
self.tree = tree
def translate(self):
- return yaml.dump({'type': 'program', 'code': []})
+ return yaml.dump(self._translate_node(self.tree))
+
+ def _translate_node(self, node):
+ if isinstance(node, ast.AST):
+ return getattr('_translate_%s' % type(node).__name__)(**node.__dict__)
+ elif isinstance(node, list):
+ return [self._translate_node(n) for n in node]
+ elif isinstance(node, dict):
+ return {k: self._translate_node(v) for k, v in node.items()}
+ else:
+ return node
+
+ def _translate_module(self, body):
+ return {'type': 'program', 'code': self._translate_node(body)}
+
+ def _translate_int(self, n):
+ return {'type': 'int', 'value': n}
|
3efd5bb62a6b6a6f62c74f3dba9f8b2833b76473
|
knightos.py
|
knightos.py
|
import os
import requests
from sys import stderr, exit
from resources import get_resource_root
def get_key(platform):
if platform == "TI73": return 0x02
if platform == "TI83p" or platform == "TI83pSE": return 0x04
if platform == "TI84p" or platform == "TI84pSE": return 0x0A
if platform == "TI84pCSE": return 0x0F
def get_upgrade_ext(platform):
if platform == "TI73": return '73u'
if platform == "TI84pCSE": return '8cu'
return '8xu'
def get_privileged(platform):
if platform == "TI73": return 0x1C
if platform == "TI83p": return 0x3C
if platform == "TI83pSE": return 0x7C
if platform == "TI84p": return 0x3C
if platform == "TI84pSE": return 0x7C
if platform == "TI84pCSE": return 0xFC
def get_fat(platform):
if platform == "TI73": return 0x17
if platform == "TI83p": return 0x37
if platform == "TI83pSE": return 0x77
if platform == "TI84p": return 0x37
if platform == "TI84pSE": return 0x77
if platform == "TI84pCSE": return 0xF7
|
import os
import requests
from sys import stderr, exit
from resources import get_resource_root
def get_key(platform):
if platform == "TI73": return 0x02
if platform == "TI83p" or platform == "TI83pSE": return 0x04
if platform == "TI84p" or platform == "TI84pSE": return 0x0A
if platform == "TI84pCSE": return 0x0F
def get_upgrade_ext(platform):
if platform == "TI73": return '73u'
if platform == "TI84pCSE": return '8cu'
return '8xu'
def get_privileged(platform):
if platform == "TI73": return 0x1C
if platform == "TI83p": return 0x1C
if platform == "TI83pSE": return 0x7C
if platform == "TI84p": return 0x3C
if platform == "TI84pSE": return 0x7C
if platform == "TI84pCSE": return 0xFC
def get_fat(platform):
if platform == "TI73": return 0x17
if platform == "TI83p": return 0x37
if platform == "TI83pSE": return 0x77
if platform == "TI84p": return 0x37
if platform == "TI84pSE": return 0x77
if platform == "TI84pCSE": return 0xF7
|
Fix privledged page constant for TI-83+
|
Fix privledged page constant for TI-83+
This closes https://github.com/KnightOS/KnightOS/issues/265
|
Python
|
mit
|
KnightOS/sdk,KnightOS/sdk,KnightOS/sdk
|
---
+++
@@ -16,7 +16,7 @@
def get_privileged(platform):
if platform == "TI73": return 0x1C
- if platform == "TI83p": return 0x3C
+ if platform == "TI83p": return 0x1C
if platform == "TI83pSE": return 0x7C
if platform == "TI84p": return 0x3C
if platform == "TI84pSE": return 0x7C
|
df027db957b38656e3acf42d6065af34509ea053
|
project/api/managers.py
|
project/api/managers.py
|
# Django
from django.contrib.auth.models import BaseUserManager
class UserManager(BaseUserManager):
def create_user(self, email, password='', person, **kwargs):
user = self.model(
email=email,
password='',
person=person,
is_active=True,
**kwargs
)
user.save(using=self._db)
return user
def create_superuser(self, email, password, person, **kwargs):
user = self.model(
email=email,
person=person,
is_staff=True,
is_active=True,
**kwargs
)
user.set_password(password)
user.save(using=self._db)
return user
|
# Django
from django.contrib.auth.models import BaseUserManager
class UserManager(BaseUserManager):
def create_user(self, email, password='', **kwargs):
user = self.model(
email=email,
password='',
is_active=True,
**kwargs
)
user.save(using=self._db)
return user
def create_superuser(self, email, password, **kwargs):
user = self.model(
email=email,
is_staff=True,
is_active=True,
**kwargs
)
user.set_password(password)
user.save(using=self._db)
return user
|
Revert "Update manager for Person requirement"
|
Revert "Update manager for Person requirement"
This reverts commit 1f7c21280b7135f026f1ff807ffc50c97587f6fd.
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,dbinetti/barberscore,dbinetti/barberscore-django,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore
|
---
+++
@@ -4,11 +4,10 @@
class UserManager(BaseUserManager):
- def create_user(self, email, password='', person, **kwargs):
+ def create_user(self, email, password='', **kwargs):
user = self.model(
email=email,
password='',
- person=person,
is_active=True,
**kwargs
)
@@ -16,10 +15,9 @@
return user
- def create_superuser(self, email, password, person, **kwargs):
+ def create_superuser(self, email, password, **kwargs):
user = self.model(
email=email,
- person=person,
is_staff=True,
is_active=True,
**kwargs
|
33a1df824ef3b339874e0a24d1c84ad05ebcb9e1
|
cvui.py
|
cvui.py
|
# This is a documentation block with several lines
# so I can test how it works.
import cv2
def main():
print("cvui main");
if __name__ == '__main__':
main()
def random_number_generator(arg1, arg2):
"""
Summary line.
Extended description of function.
Parameters
----------
arg1 : int
Description of arg1
arg2 : str
Description of arg2
Returns
-------
int
Description of return value
"""
return 42
def init(window_name, delay_wait_key = -1, create_named_window = True):
cv2.namedWindow(window_name)
def text(where, x, y, text, font_scale = 0.4, color = 0xCECECE):
cv2.putText(where, 'OpenCV', (x, y), cv2.FONT_HERSHEY_SIMPLEX, font_scale, (255, 255, 255), 1, cv2.LINE_AA)
def button(where, x, y, label):
# Not implemented yet!
return False
|
# This is a documentation block with several lines
# so I can test how it works.
import cv2
def main():
print("cvui main")
if __name__ == '__main__':
main()
def random_number_generator(arg1, arg2):
"""
Summary line.
Extended description of function.
Parameters
----------
arg1 : int
Description of arg1
arg2 : str
Description of arg2
Returns
-------
int
Description of return value
"""
return 42
def init(window_name, delay_wait_key = -1, create_named_window = True):
cv2.namedWindow(window_name)
def text(where, x, y, text, font_scale = 0.4, color = 0xCECECE):
cv2.putText(where, text, (x, y), cv2.FONT_HERSHEY_SIMPLEX, font_scale, (255, 255, 255), 1, cv2.LINE_AA)
def printf(theWhere, theX, theY, theFontScale, theColor, theFmt, *theArgs):
aText = theFmt % theArgs
text(theWhere, theX, theY, aText, theFontScale, theColor)
def button(where, x, y, label):
# Not implemented yet!
return False
def update(window_name = ""):
"""
Updates the library internal things. You need to call this function **AFTER** you are done adding/manipulating
UI elements in order for them to react to mouse interactions.
Parameters
----------
window_name : str
Name of the window whose components are being updated. If no window name is provided, cvui uses the default window.
\sa init()
\sa watch()
\sa context()
"""
|
Add draft of printf in python
|
Add draft of printf in python
|
Python
|
mit
|
Dovyski/cvui,Dovyski/cvui,Dovyski/cvui
|
---
+++
@@ -4,7 +4,7 @@
import cv2
def main():
- print("cvui main");
+ print("cvui main")
if __name__ == '__main__':
main()
@@ -34,8 +34,27 @@
cv2.namedWindow(window_name)
def text(where, x, y, text, font_scale = 0.4, color = 0xCECECE):
- cv2.putText(where, 'OpenCV', (x, y), cv2.FONT_HERSHEY_SIMPLEX, font_scale, (255, 255, 255), 1, cv2.LINE_AA)
+ cv2.putText(where, text, (x, y), cv2.FONT_HERSHEY_SIMPLEX, font_scale, (255, 255, 255), 1, cv2.LINE_AA)
+
+def printf(theWhere, theX, theY, theFontScale, theColor, theFmt, *theArgs):
+ aText = theFmt % theArgs
+ text(theWhere, theX, theY, aText, theFontScale, theColor)
def button(where, x, y, label):
# Not implemented yet!
return False
+
+def update(window_name = ""):
+ """
+ Updates the library internal things. You need to call this function **AFTER** you are done adding/manipulating
+ UI elements in order for them to react to mouse interactions.
+
+ Parameters
+ ----------
+ window_name : str
+ Name of the window whose components are being updated. If no window name is provided, cvui uses the default window.
+
+ \sa init()
+ \sa watch()
+ \sa context()
+ """
|
9ecf7d7aa6f6d3a80ba2a327ee5b402b665a3e0c
|
test/tests/nodalkernels/high_order_time_integration/convergence_study.py
|
test/tests/nodalkernels/high_order_time_integration/convergence_study.py
|
import os
import csv
from collections import deque
import matplotlib
import numpy as np
import matplotlib.pyplot as plt
schemes = ['implicit-euler', 'bdf2', 'crank-nicolson', 'dirk', 'explicit-euler', 'rk-2']
scheme_errors = {}
# Generate list of dts
dt = 1.0
dts = []
for i in range(0,10):
dts.append(dt)
dt = dt / 2.0
for scheme in schemes:
errors = []
for dt in dts:
command = '../../../moose_test-opt -i high_order_time_integration.i Executioner/dt=' + str(dt) + ' Executioner/scheme=' + scheme
os.system(command)
with open('high_order_time_integration_out.csv', 'r') as csvfile:
csv_data = csv.reader(csvfile, delimiter=',')
# Get the last row second column
error = deque(csv_data, 2)[0][1]
errors.append(error)
scheme_errors[scheme] = errors
for scheme, errors in scheme_errors.iteritems():
plt.plot(dts, errors, label=scheme)
plt.xscale('log')
plt.yscale('log')
plt.title('Time Convergence Study')
plt.xlabel('dt (s)')
plt.ylabel('L2 Error')
plt.legend(loc='upper left')
plt.show()
|
import os
import csv
from collections import deque
import matplotlib
import numpy as np
import matplotlib.pyplot as plt
schemes = ['implicit-euler', 'bdf2', 'crank-nicolson', 'dirk']
scheme_errors = {}
# Generate list of dts
dt = 1.0
dts = []
for i in range(0,5):
dts.append(dt)
dt = dt / 2.0
for scheme in schemes:
errors = []
for dt in dts:
command = '../../../moose_test-opt -i high_order_time_integration.i Executioner/dt=' + str(dt) + ' Executioner/scheme=' + scheme
os.system(command)
with open('high_order_time_integration_out.csv', 'r') as csvfile:
csv_data = csv.reader(csvfile, delimiter=',')
# Get the last row second column
error = deque(csv_data, 2)[0][1]
errors.append(error)
scheme_errors[scheme] = errors
for scheme, errors in scheme_errors.iteritems():
plt.plot(dts, errors, label=scheme)
plt.xscale('log')
plt.yscale('log')
plt.title('Time Convergence Study')
plt.xlabel('dt (s)')
plt.ylabel('L2 Error')
plt.legend(loc='upper left')
plt.show()
|
Test fewer dts, only test implicit methods.
|
Test fewer dts, only test implicit methods.
The timesteps used here are not valid for explicit methods.
|
Python
|
lgpl-2.1
|
jessecarterMOOSE/moose,backmari/moose,giopastor/moose,andrsd/moose,jiangwen84/moose,sapitts/moose,Chuban/moose,friedmud/moose,milljm/moose,bwspenc/moose,sapitts/moose,nuclear-wizard/moose,idaholab/moose,liuwenf/moose,dschwen/moose,permcody/moose,joshua-cogliati-inl/moose,lindsayad/moose,nuclear-wizard/moose,stimpsonsg/moose,jasondhales/moose,laagesen/moose,katyhuff/moose,SudiptaBiswas/moose,idaholab/moose,milljm/moose,bwspenc/moose,milljm/moose,jasondhales/moose,stimpsonsg/moose,friedmud/moose,permcody/moose,SudiptaBiswas/moose,lindsayad/moose,harterj/moose,stimpsonsg/moose,milljm/moose,idaholab/moose,wgapl/moose,YaqiWang/moose,idaholab/moose,liuwenf/moose,yipenggao/moose,YaqiWang/moose,katyhuff/moose,harterj/moose,katyhuff/moose,idaholab/moose,harterj/moose,mellis13/moose,wgapl/moose,friedmud/moose,jiangwen84/moose,joshua-cogliati-inl/moose,stimpsonsg/moose,permcody/moose,liuwenf/moose,SudiptaBiswas/moose,jhbradley/moose,backmari/moose,yipenggao/moose,wgapl/moose,joshua-cogliati-inl/moose,jhbradley/moose,liuwenf/moose,dschwen/moose,jiangwen84/moose,YaqiWang/moose,sapitts/moose,lindsayad/moose,andrsd/moose,lindsayad/moose,bwspenc/moose,permcody/moose,jhbradley/moose,milljm/moose,sapitts/moose,liuwenf/moose,giopastor/moose,harterj/moose,bwspenc/moose,giopastor/moose,YaqiWang/moose,katyhuff/moose,dschwen/moose,jessecarterMOOSE/moose,yipenggao/moose,jasondhales/moose,sapitts/moose,wgapl/moose,backmari/moose,backmari/moose,dschwen/moose,andrsd/moose,friedmud/moose,bwspenc/moose,mellis13/moose,nuclear-wizard/moose,dschwen/moose,mellis13/moose,jessecarterMOOSE/moose,laagesen/moose,jiangwen84/moose,giopastor/moose,yipenggao/moose,SudiptaBiswas/moose,jessecarterMOOSE/moose,laagesen/moose,mellis13/moose,Chuban/moose,SudiptaBiswas/moose,andrsd/moose,jessecarterMOOSE/moose,jhbradley/moose,Chuban/moose,laagesen/moose,andrsd/moose,liuwenf/moose,lindsayad/moose,joshua-cogliati-inl/moose,jasondhales/moose,Chuban/moose,laagesen/moose,harterj/moose,nuclear-wizard/moose
|
---
+++
@@ -6,14 +6,14 @@
import numpy as np
import matplotlib.pyplot as plt
-schemes = ['implicit-euler', 'bdf2', 'crank-nicolson', 'dirk', 'explicit-euler', 'rk-2']
+schemes = ['implicit-euler', 'bdf2', 'crank-nicolson', 'dirk']
scheme_errors = {}
# Generate list of dts
dt = 1.0
dts = []
-for i in range(0,10):
+for i in range(0,5):
dts.append(dt)
dt = dt / 2.0
|
3fed93e1c0c12dd98a1be7e024a4c637c5751549
|
src/sentry/tasks/base.py
|
src/sentry/tasks/base.py
|
"""
sentry.tasks.base
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from celery.task import task
from django_statsd.clients import statsd
from functools import wraps
def instrumented_task(name, queue, stat_suffix=None, **kwargs):
statsd_key = 'tasks.{name}'.format(name=name)
if stat_suffix:
statsd_key += '.{key}'.format(key=stat_suffix)
def wrapped(func):
@wraps(func)
def _wrapped(*args, **kwargs):
with statsd.timer(statsd_key):
return func(*args, **kwargs)
return task(name=name, queue=queue, **kwargs)(func)
return wrapped
|
"""
sentry.tasks.base
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from celery.task import task
from django_statsd.clients import statsd
from functools import wraps
def instrumented_task(name, queue, stat_suffix=None, **kwargs):
statsd_key = 'jobs.duration.{name}'.format(name=name)
if stat_suffix:
statsd_key += '.{key}'.format(key=stat_suffix)
def wrapped(func):
@wraps(func)
def _wrapped(*args, **kwargs):
with statsd.timer(statsd_key):
return func(*args, **kwargs)
return task(name=name, queue=queue, **kwargs)(func)
return wrapped
|
Change tasks key prefix to jobs.duration
|
Change tasks key prefix to jobs.duration
|
Python
|
bsd-3-clause
|
gencer/sentry,korealerts1/sentry,JTCunning/sentry,fuziontech/sentry,wujuguang/sentry,jean/sentry,mvaled/sentry,jean/sentry,BuildingLink/sentry,zenefits/sentry,drcapulet/sentry,ewdurbin/sentry,TedaLIEz/sentry,fotinakis/sentry,nicholasserra/sentry,looker/sentry,gencer/sentry,BayanGroup/sentry,BuildingLink/sentry,mvaled/sentry,boneyao/sentry,ewdurbin/sentry,hongliang5623/sentry,1tush/sentry,Natim/sentry,hongliang5623/sentry,TedaLIEz/sentry,pauloschilling/sentry,llonchj/sentry,felixbuenemann/sentry,camilonova/sentry,looker/sentry,ifduyue/sentry,nicholasserra/sentry,kevinastone/sentry,wujuguang/sentry,jean/sentry,JTCunning/sentry,mvaled/sentry,Kryz/sentry,imankulov/sentry,JamesMura/sentry,kevinlondon/sentry,felixbuenemann/sentry,Natim/sentry,songyi199111/sentry,fuziontech/sentry,fotinakis/sentry,mvaled/sentry,gencer/sentry,zenefits/sentry,llonchj/sentry,JamesMura/sentry,ifduyue/sentry,vperron/sentry,boneyao/sentry,beeftornado/sentry,Kryz/sentry,gencer/sentry,alexm92/sentry,mvaled/sentry,wong2/sentry,JackDanger/sentry,JamesMura/sentry,zenefits/sentry,wong2/sentry,fotinakis/sentry,jokey2k/sentry,daevaorn/sentry,pauloschilling/sentry,camilonova/sentry,1tush/sentry,1tush/sentry,gencer/sentry,zenefits/sentry,JTCunning/sentry,kevinastone/sentry,BuildingLink/sentry,fotinakis/sentry,gg7/sentry,jean/sentry,ifduyue/sentry,daevaorn/sentry,ngonzalvez/sentry,hongliang5623/sentry,JackDanger/sentry,nicholasserra/sentry,JamesMura/sentry,mvaled/sentry,llonchj/sentry,JamesMura/sentry,beeftornado/sentry,daevaorn/sentry,wong2/sentry,mitsuhiko/sentry,boneyao/sentry,wujuguang/sentry,songyi199111/sentry,vperron/sentry,vperron/sentry,songyi199111/sentry,beeftornado/sentry,ifduyue/sentry,camilonova/sentry,argonemyth/sentry,mitsuhiko/sentry,ifduyue/sentry,kevinastone/sentry,felixbuenemann/sentry,korealerts1/sentry,gg7/sentry,zenefits/sentry,BuildingLink/sentry,Natim/sentry,imankulov/sentry,Kryz/sentry,fuziontech/sentry,kevinlondon/sentry,pauloschilling/sentry,JackDanger/sentry,jokey2k/sentry,look
er/sentry,alexm92/sentry,ewdurbin/sentry,imankulov/sentry,BuildingLink/sentry,BayanGroup/sentry,korealerts1/sentry,gg7/sentry,BayanGroup/sentry,ngonzalvez/sentry,ngonzalvez/sentry,drcapulet/sentry,daevaorn/sentry,TedaLIEz/sentry,argonemyth/sentry,kevinlondon/sentry,alexm92/sentry,jean/sentry,looker/sentry,drcapulet/sentry,jokey2k/sentry,looker/sentry,argonemyth/sentry
|
---
+++
@@ -12,7 +12,7 @@
def instrumented_task(name, queue, stat_suffix=None, **kwargs):
- statsd_key = 'tasks.{name}'.format(name=name)
+ statsd_key = 'jobs.duration.{name}'.format(name=name)
if stat_suffix:
statsd_key += '.{key}'.format(key=stat_suffix)
|
0c6a27b483fdfaf04c0481151d2c3e282e4eca4f
|
opps/images/templatetags/images_tags.py
|
opps/images/templatetags/images_tags.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign:
new['halign'] = image.halign
if image.valign:
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
Create template tag image obj on images receive obj image
|
Create template tag image obj on images
receive obj image
|
Python
|
mit
|
jeanmask/opps,williamroot/opps,opps/opps,williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps
|
---
+++
@@ -10,3 +10,19 @@
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
+
+
+@register.simple_tag
+def image_obj(image, **kwargs):
+ new = {}
+ new['flip'] = image.flip
+ new['flop'] = image.flop
+ if image.halign:
+ new['halign'] = image.halign
+ if image.valign:
+ new['valign'] = image.valign
+ new['fit_in'] = image.fit_in
+ new['smart'] = image.smart
+
+ kwargs = dict(new, **kwargs)
+ return url(image_url=image.image.url, **kwargs)
|
27eea99e2ca78f7af3fb308f91d377e70c53e3c4
|
app.py
|
app.py
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('static/', 'index.html')
@app.route("/<path:path>")
def serve_static_files(path):
return send_from_directory('static/', path)
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
return jsonify({'sounds': sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__()
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
return send_from_directory('static/', 'index.html')
@app.route("/<path:path>")
def serve_static_files(path):
return send_from_directory('static/', path)
@app.route("/sounds")
def get_sounds_list():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
_sounds = []
for sound in sounds:
_sounds.append({'title': sound, 'filename': sound})
return jsonify({'sounds': _sounds})
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["file"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__()
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename + "\n"
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
Update sounds list data view
|
Update sounds list data view
|
Python
|
mit
|
spb201/turbulent-octo-rutabaga-api,spb201/turbulent-octo-rutabaga-api,spb201/turbulent-octo-rutabaga-api
|
---
+++
@@ -17,7 +17,10 @@
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
sounds = os.listdir(UPLOAD_FOLDER)
- return jsonify({'sounds': sounds})
+ _sounds = []
+ for sound in sounds:
+ _sounds.append({'title': sound, 'filename': sound})
+ return jsonify({'sounds': _sounds})
@app.route("/sounds/<path:path>")
@@ -26,7 +29,8 @@
@app.route("/upload", methods=["POST"])
def upload_file():
- file = request.files["sound"]
+ file = request.files["file"]
+
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
|
fb9a16917ba8f26caa0d941b181fa083fcb7a2da
|
bot.py
|
bot.py
|
from discord.ext.commands import Bot, CommandInvokeError
from discord.ext.commands.errors import CommandNotFound, MissingRequiredArgument
class BeattieBot(Bot):
async def reply(self, ctx, message):
return await ctx.send(f'{ctx.message.author.mention}\n{message}')
async def handle_error(self, exception, ctx):
if isinstance(exception, MissingRequiredArgument):
await ctx.send('Missing required arguments.')
elif not isinstance(exception, CommandNotFound):
await ctx.send('Generic error handler triggered. '
'This should never happen.')
if isinstance(exception, CommandInvokeError):
exception = exception.original
raise exception
async def on_ready(self):
print('Logged in as')
print(self.user.name)
print(self.user.id)
print('------')
async def on_message(self, message):
msg = message.content.split(None, 1)
msg[0] = msg[0].lower()
message.content = ' '.join(msg)
await self.process_commands(message)
async def on_command_error(self, exception, ctx):
if ctx.command is None or not hasattr(ctx.command, 'on_error'):
await self.handle_error(exception, ctx)
|
from discord.ext.commands import Bot, CommandInvokeError
from discord.ext.commands.errors import CommandNotFound, MissingRequiredArgument
class BeattieBot(Bot):
async def reply(self, ctx, message):
return await ctx.send(f'{ctx.message.author.mention}\n{message}')
async def handle_error(self, exception, ctx):
if isinstance(exception, MissingRequiredArgument):
await ctx.send('Missing required arguments.')
elif not isinstance(exception, CommandNotFound):
await ctx.send('Generic error handler triggered. '
'This should never happen.')
try:
raise exception.original
except AttributeError:
raise exception
async def on_ready(self):
print('Logged in as')
print(self.user.name)
print(self.user.id)
print('------')
async def on_message(self, message):
msg = message.content.split(None, 1)
msg[0] = msg[0].lower()
message.content = ' '.join(msg)
await self.process_commands(message)
async def on_command_error(self, exception, ctx):
if ctx.command is None or not hasattr(ctx.command, 'on_error'):
await self.handle_error(exception, ctx)
|
Change isinstance check to duck typing because this is Python lol
|
Change isinstance check to duck typing because this is Python lol
|
Python
|
mit
|
BeatButton/beattie-bot,BeatButton/beattie
|
---
+++
@@ -11,9 +11,10 @@
elif not isinstance(exception, CommandNotFound):
await ctx.send('Generic error handler triggered. '
'This should never happen.')
- if isinstance(exception, CommandInvokeError):
- exception = exception.original
- raise exception
+ try:
+ raise exception.original
+ except AttributeError:
+ raise exception
async def on_ready(self):
print('Logged in as')
|
fdb56c41b5151e9fc4ce3eb997b2bb88d16594c1
|
pyalp/pyalp/contexts.py
|
pyalp/pyalp/contexts.py
|
#main/contexts.py
from django.core.urlresolvers import resolve
from cl_module.cl_module import ModuleManager
from flags.registry import get_flag_registry
from pyalp.skin import get_skin
def app_name(request):
return {'app_name': resolve(request.path).app_name}
def url_name(request):
return {'url_name': resolve(request.path).url_name}
def skin(request):
return {'skin': get_skin()}
def modules(request):
return {'modules': ModuleManager()}
def lan(request):
# TODO: have this load info from the db instead
lan = {'name': 'RFLAN'}
return {'lan': lan}
def flags(request):
return {'flags': get_flag_registry().get_statusdict()}
|
#main/contexts.py
from os.path import join
import json
from django.core.urlresolvers import resolve
from django.conf import settings
from cl_module.cl_module import ModuleManager
from flags.registry import get_flag_registry
from pyalp.skin import get_skin
def app_name(request):
return {'app_name': resolve(request.path).app_name}
def url_name(request):
return {'url_name': resolve(request.path).url_name}
def skin(request):
return {'skin': get_skin()}
def modules(request):
return {'modules': ModuleManager()}
def lan(request):
# TODO: have this load info from the db instead
path = join(settings.PROJECT_ROOT, 'pyalp', 'config.json')
with open(path) as fh:
contents = fh.readlines()
lan = json.loads('\n'.join(
line
for line in map(str.lstrip, contents)
if not line.startswith('//')
))
return {'lan': lan}
def flags(request):
return {'flags': get_flag_registry().get_statusdict()}
|
Load the lan config from a json file
|
Load the lan config from a json file
|
Python
|
mit
|
Mause/pyalp,Mause/pyalp,Mause/pyalp,Mause/pyalp
|
---
+++
@@ -1,5 +1,9 @@
#main/contexts.py
+from os.path import join
+import json
+
from django.core.urlresolvers import resolve
+from django.conf import settings
from cl_module.cl_module import ModuleManager
from flags.registry import get_flag_registry
@@ -24,7 +28,15 @@
def lan(request):
# TODO: have this load info from the db instead
- lan = {'name': 'RFLAN'}
+ path = join(settings.PROJECT_ROOT, 'pyalp', 'config.json')
+ with open(path) as fh:
+ contents = fh.readlines()
+
+ lan = json.loads('\n'.join(
+ line
+ for line in map(str.lstrip, contents)
+ if not line.startswith('//')
+ ))
return {'lan': lan}
|
9e7d3c35857600445cb6df42ba18d289dc0e37a9
|
wsgi.py
|
wsgi.py
|
from os import getenv
from webapp import create_app
from argparse import ArgumentParser
app = create_app(getenv('FLASK_CONFIG') or 'development')
def main():
parser = ArgumentParser()
parser.add_argument("-p", "--port", help="port number")
args = parser.parse_args()
port = int(args.port or None)
app.run(port=port)
if __name__ == "__main__":
main()
|
from os import getenv
from webapp import create_app
from argparse import ArgumentParser
app = create_app(getenv('FLASK_CONFIG') or 'development')
def main():
parser = ArgumentParser()
parser.add_argument("-p", "--port", help="port number")
args = parser.parse_args()
port = int(args.port or 5000)
app.run(port=port)
if __name__ == "__main__":
main()
|
Fix in port number initialisation
|
Fix in port number initialisation
|
Python
|
bsd-3-clause
|
aleksandergurin/news,aleksandergurin/news,aleksandergurin/news
|
---
+++
@@ -11,7 +11,7 @@
parser = ArgumentParser()
parser.add_argument("-p", "--port", help="port number")
args = parser.parse_args()
- port = int(args.port or None)
+ port = int(args.port or 5000)
app.run(port=port)
if __name__ == "__main__":
|
4a2bd50b6747eb00ddedd0d3e26f28cc43980b11
|
tools/test-commands.py
|
tools/test-commands.py
|
#!/usr/bin/python
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", 5001))
s.send("imei:123456789012345,tracker,151030080103,,F,000101.000,A,5443.3834,N,02512.9071,E,0.00,0;")
while True:
print s.recv(1024)
s.close()
|
#!/usr/bin/python
import socket
import binascii
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", 5001))
#s.send(binascii.unhexlify('68680f0504035889905831401700df1a00000d0a'))
s.send("imei:123456789012345,tracker,151030080103,,F,000101.000,A,5443.3834,N,02512.9071,E,0.00,0;")
while True:
print s.recv(1024)
s.close()
|
Extend script to test binary commands
|
Extend script to test binary commands
|
Python
|
apache-2.0
|
5of9/traccar,jssenyange/traccar,tananaev/traccar,duke2906/traccar,AnshulJain1985/Roadcast-Tracker,tsmgeek/traccar,stalien/traccar_test,renaudallard/traccar,orcoliver/traccar,AnshulJain1985/Roadcast-Tracker,tsmgeek/traccar,joseant/traccar-1,ninioe/traccar,tananaev/traccar,5of9/traccar,renaudallard/traccar,al3x1s/traccar,jon-stumpf/traccar,orcoliver/traccar,al3x1s/traccar,tananaev/traccar,vipien/traccar,vipien/traccar,jssenyange/traccar,ninioe/traccar,duke2906/traccar,jon-stumpf/traccar,stalien/traccar_test,jon-stumpf/traccar,ninioe/traccar,orcoliver/traccar,joseant/traccar-1,jssenyange/traccar,tsmgeek/traccar
|
---
+++
@@ -1,9 +1,11 @@
#!/usr/bin/python
import socket
+import binascii
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("localhost", 5001))
+#s.send(binascii.unhexlify('68680f0504035889905831401700df1a00000d0a'))
s.send("imei:123456789012345,tracker,151030080103,,F,000101.000,A,5443.3834,N,02512.9071,E,0.00,0;")
while True:
|
841da00ffa000acec3e287b8b2af91147271b728
|
cupy/array_api/_typing.py
|
cupy/array_api/_typing.py
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
from cupy.cuda import Device as _Device
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
from typing import Any, Literal, Sequence, Type, Union
from . import (
Array,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = _Device
Dtype = Type[
Union[[int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64]]
]
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
from cupy.cuda import Device as _Device
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = _Device
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
Fix invalid parameter types used in `Dtype`
|
MAINT: Fix invalid parameter types used in `Dtype`
|
Python
|
mit
|
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
|
---
+++
@@ -18,10 +18,12 @@
"PyCapsule",
]
-from typing import Any, Literal, Sequence, Type, Union
+import sys
+from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
-from . import (
- Array,
+from . import Array
+from numpy import (
+ dtype,
int8,
int16,
int32,
@@ -39,9 +41,22 @@
NestedSequence = Sequence[Sequence[Any]]
Device = _Device
-Dtype = Type[
- Union[[int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64]]
-]
+if TYPE_CHECKING or sys.version_info >= (3, 9):
+ Dtype = dtype[Union[
+ int8,
+ int16,
+ int32,
+ int64,
+ uint8,
+ uint16,
+ uint32,
+ uint64,
+ float32,
+ float64,
+ ]]
+else:
+ Dtype = dtype
+
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
7ef1d2fc9a99f0ac190206d5cb721dced0de5ad4
|
speedchart.py
|
speedchart.py
|
from flask import Flask, render_template
from parser import Parser
import json
app = Flask(__name__)
@app.route("/")
def index():
parser = Parser()
data = parser.parse_all()
print json.dumps(data)
return render_template("index.html", data=data)
if __name__ == "__main__":
app.run(debug=True)
|
from flask import Flask, render_template
from parser import Parser
import json
app = Flask(__name__)
@app.route("/")
def index():
parser = Parser()
data = parser.parse_all()
return render_template("index.html", data=data)
if __name__ == "__main__":
app.run(debug=True)
|
Remove unnecessary printing of JSON in log
|
Remove unnecessary printing of JSON in log
|
Python
|
mit
|
ruralocity/speedchart,ruralocity/speedchart
|
---
+++
@@ -7,7 +7,6 @@
def index():
parser = Parser()
data = parser.parse_all()
- print json.dumps(data)
return render_template("index.html", data=data)
if __name__ == "__main__":
|
3120f41f34735e0ab6f526f0a144fb3682d43391
|
pymagicc/definitions/__init__.py
|
pymagicc/definitions/__init__.py
|
from os.path import dirname, join
import pandas as pd
_dtrm = pd.read_csv(join(dirname(__file__), "magicc_dattype_regionmode_regions.csv"))
region_cols = _dtrm.columns.to_series().apply(lambda x: x.startswith("Region"))
dattype_regionmode_regions = _dtrm.loc[:, ~region_cols].copy()
dattype_regionmode_regions["Regions"] = [
[r for r in raw if not pd.isnull(r)]
for raw in _dtrm.loc[:, region_cols].values.tolist()
]
emissions_units = {}
concentrations_units = {}
|
from os.path import dirname, join
import pandas as pd
_dtrm = pd.read_csv(join(dirname(__file__), "magicc_dattype_regionmode_regions.csv"))
region_cols = _dtrm.columns.to_series().apply(lambda x: x.startswith("Region"))
dattype_regionmode_regions = _dtrm.loc[:, ~region_cols].copy()
dattype_regionmode_regions["Regions"] = [
[r for r in raw if not pd.isnull(r)]
for raw in _dtrm.loc[:, region_cols].values.tolist()
]
# TODO: do read ins for these too
emissions_units = {}
concentrations_units = {}
|
Update TODO for other definitions
|
Update TODO for other definitions
|
Python
|
agpl-3.0
|
openclimatedata/pymagicc,openclimatedata/pymagicc
|
---
+++
@@ -12,5 +12,6 @@
for raw in _dtrm.loc[:, region_cols].values.tolist()
]
+# TODO: do read ins for these too
emissions_units = {}
concentrations_units = {}
|
f048d62b7831f0364093025c6bf9e3458d1a7b11
|
projects/urls.py
|
projects/urls.py
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
url(r'^edit/(?P<project_id>.*)/$', 'edit_project', name='edit_project'),
url(r'^status/(?P<project_id>.*)/$', 'edit_status', name='edit_status'),
url(r'^archive/$', 'projects_archive', name='projects_archive'),
)
|
from django.conf.urls import patterns, url
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
url(r'^archive/', 'projects_archive', name='projects_archive'),
url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
Add url corresponding to the added view
|
Add url corresponding to the added view
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
---
+++
@@ -3,7 +3,8 @@
urlpatterns = patterns('projects.views',
url(r'^add/$', 'add_project', name='add_project'),
- url(r'^edit/(?P<project_id>.*)/$', 'edit_project', name='edit_project'),
- url(r'^status/(?P<project_id>.*)/$', 'edit_status', name='edit_status'),
- url(r'^archive/$', 'projects_archive', name='projects_archive'),
+ url(r'^edit/(?P<project_id>\d+)/$', 'edit_project', name='edit_project'),
+ url(r'^status/(?P<project_id>\d+)/$', 'edit_status', name='edit_status'),
+ url(r'^archive/', 'projects_archive', name='projects_archive'),
+ url(r'^archive/review/(?P<project_id>\d+)/$', 'show_project', name='show-project'),
)
|
0c1ecf09d892e15ae02a92a1643e7cdb4ae95069
|
unit_tests/test_ccs.py
|
unit_tests/test_ccs.py
|
#!/usr/bin/env python3
import pytest
import sys
# This line allows the tests to run if you just naively run this script.
# But the preferred way is to use run_tests.sh
sys.path.insert(0,'../MultiQC')
from multiqc.modules.ccs.ccs import parse_PacBio_log, parse_line
PARSABLE_LINES = [
'',
'ZMWs input :',
'ZMWs input (A) :',
'ZMWs input : 93',
'ZMWs input (A) : 93',
'Coefficient of correlation : 28.78%'
]
PARSED_RESULTS = [
{},
{
'name':'ZMWs input'
},
{
'name':'ZMWs input',
'annotation':'A'
},
{
'name':'ZMWs input',
'count': 93
},
{
'name':'ZMWs input',
'annotation':'A',
'count': 93
},
{
'name': 'Coefficient of correlation',
'percentage': 28.78
}
]
MARK = zip(PARSABLE_LINES, PARSED_RESULTS)
@pytest.mark.parametrize(['line', 'data'], MARK)
def test_parsable_lines(line, data):
parsed_line = parse_line(line)
assert parsed_line == data
|
#!/usr/bin/env python3
import pytest
import sys
# This line allows the tests to run if you just naively run this script.
# But the preferred way is to use run_tests.sh
sys.path.insert(0,'../MultiQC')
from multiqc.modules.ccs.ccs import parse_PacBio_log, parse_line
PARSABLE_LINES = [
'',
'ZMWs input :',
'ZMWs input (A) :',
'ZMWs input : 93',
'ZMWs input (A) : 93',
'Coefficient of correlation : 28.78%',
'ZMWs generating CCS (B) : 44 (47.31%)',
'Coefficient of correlation (A) : 28.78%',
]
PARSED_RESULTS = [
{},
{
'name':'ZMWs input'
},
{
'name':'ZMWs input',
'annotation':'A'
},
{
'name':'ZMWs input',
'count': 93
},
{
'name':'ZMWs input',
'annotation':'A',
'count': 93
},
{
'name': 'Coefficient of correlation',
'percentage': 28.78
},
{
'name': 'ZMWs generating CCS',
'annotation': 'B',
'count': 44,
'percentage': 47.31
},
{
'name': 'Coefficient of correlation',
'percentage': 28.78,
'annotation': 'A'
}
]
MARK = zip(PARSABLE_LINES, PARSED_RESULTS)
@pytest.mark.parametrize(['line', 'data'], MARK)
def test_parsable_lines(line, data):
parsed_line = parse_line(line)
assert parsed_line == data
|
Add tests for lines with both a count and percentage
|
Add tests for lines with both a count and percentage
|
Python
|
mit
|
ewels/MultiQC_TestData,ewels/MultiQC_TestData,ewels/MultiQC_TestData,ewels/MultiQC_TestData,ewels/MultiQC_TestData
|
---
+++
@@ -15,7 +15,9 @@
'ZMWs input (A) :',
'ZMWs input : 93',
'ZMWs input (A) : 93',
- 'Coefficient of correlation : 28.78%'
+ 'Coefficient of correlation : 28.78%',
+ 'ZMWs generating CCS (B) : 44 (47.31%)',
+ 'Coefficient of correlation (A) : 28.78%',
]
PARSED_RESULTS = [
@@ -39,6 +41,17 @@
{
'name': 'Coefficient of correlation',
'percentage': 28.78
+ },
+ {
+ 'name': 'ZMWs generating CCS',
+ 'annotation': 'B',
+ 'count': 44,
+ 'percentage': 47.31
+ },
+ {
+ 'name': 'Coefficient of correlation',
+ 'percentage': 28.78,
+ 'annotation': 'A'
}
]
|
9d5c534339c417842428d2a4dcca6c1745fb9770
|
test/test_integration.py
|
test/test_integration.py
|
import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
connRouter.close()
self.assertEqual(response.status, 404)
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
print("Body:", response.read().decode("utf-8"),"\n")
#self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
#self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
|
import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
connRouter.close()
self.assertEqual(response.status, 404)
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
params = {
'location': '/google',
'upstream': 'ttp://www.google.com/',
'ttl': '10'
}
connConfig.request("GET","/configure",params=params)
response = connConfig.getresponse()
print("Body:", response.read().decode("utf-8"),"\n")
#self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
#self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
|
Use query parameters through the python library
|
Use query parameters through the python library
|
Python
|
apache-2.0
|
dhiaayachi/dynx,dhiaayachi/dynx
|
---
+++
@@ -13,7 +13,12 @@
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
- connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
+ params = {
+ 'location': '/google',
+ 'upstream': 'ttp://www.google.com/',
+ 'ttl': '10'
+ }
+ connConfig.request("GET","/configure",params=params)
response = connConfig.getresponse()
print("Body:", response.read().decode("utf-8"),"\n")
#self.assertEqual(response.status, 200)
|
7e4a8698532a79ec6338961e91e71c54c155f02a
|
demo/apps/catalogue/migrations/0011_auto_20160616_1335.py
|
demo/apps/catalogue/migrations/0011_auto_20160616_1335.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0013_make_rendition_upload_callable'),
('catalogue', '0010_auto_20160616_1048'),
]
operations = [
migrations.AddField(
model_name='category',
name='description',
field=models.TextField(verbose_name='Description', blank=True),
),
migrations.AddField(
model_name='category',
name='image',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtailimages.Image', null=True),
),
migrations.AddField(
model_name='category',
name='name',
field=models.CharField(max_length=255, verbose_name='Name', db_index=True),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0013_make_rendition_upload_callable'),
('catalogue', '0010_auto_20160616_1048'),
]
operations = [
migrations.AddField(
model_name='category',
name='description',
field=models.TextField(verbose_name='Description', blank=True),
),
migrations.AddField(
model_name='category',
name='image',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtailimages.Image', null=True),
)
]
|
Remove name field. It already exists
|
Remove name field. It already exists
|
Python
|
mit
|
pgovers/oscar-wagtail-demo,pgovers/oscar-wagtail-demo
|
---
+++
@@ -22,10 +22,5 @@
model_name='category',
name='image',
field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtailimages.Image', null=True),
- ),
- migrations.AddField(
- model_name='category',
- name='name',
- field=models.CharField(max_length=255, verbose_name='Name', db_index=True),
- ),
+ )
]
|
18998011bb52616a3002ca298a64ea61c5727a76
|
skeleton/website/jasyscript.py
|
skeleton/website/jasyscript.py
|
import konstrukteur.Konstrukteur
import jasy.asset.Manager2 as AssetManager
@task
def build(regenerate = False):
"""Generate source (development) version"""
# Initialize assets
AssetManager.AssetManager(profile, session)
# Build static website
konstrukteur.Konstrukteur.build(regenerate)
|
import konstrukteur.Konstrukteur
import jasy.asset.Manager2 as AssetManager
@task
def build(regenerate = False):
"""Generate source (development) version"""
# Initialize assets
assetManager = AssetManager.AssetManager(profile, session)
# Build static website
konstrukteur.Konstrukteur.build(regenerate)
# Copy assets to build path
assetManager.copyAssets()
|
Copy used assets to output path
|
Copy used assets to output path
|
Python
|
mit
|
fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur
|
---
+++
@@ -6,7 +6,10 @@
"""Generate source (development) version"""
# Initialize assets
- AssetManager.AssetManager(profile, session)
+ assetManager = AssetManager.AssetManager(profile, session)
# Build static website
konstrukteur.Konstrukteur.build(regenerate)
+
+ # Copy assets to build path
+ assetManager.copyAssets()
|
fe442d84140b0a588c6a8490b58a10995df58f17
|
tests/optimizers/test_constant_optimizer.py
|
tests/optimizers/test_constant_optimizer.py
|
"""Test suite for optimizers.constant."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import ast
import pytest
from pycc.asttools import parse
from pycc.optimizers import constant
source = """
ONE = 1
TWO = 2
THREE = ONE + TWO
FOUR = THREE + ONE
FIVE = THREE + TWO
def return_const():
return FOUR
def return_var():
return FIVE
FIVE = FIVE + ONE
FIVE -= ONE
"""
@pytest.fixture
def node():
"""Get as AST node from the source."""
return parse.parse(source)
def test_constant_inliner(node):
"""Test that constant values are inlined."""
constant.ConstantOptimizer()(node)
# Check assignment values using constants.
assert node.body[2].value.n == 3
assert node.body[3].value.n == 4
assert node.body[4].value.n == 5
# Check return val of const function.
assert isinstance(node.body[5].body[0].value, ast.Num)
assert node.body[5].body[0].value.n == 4
# Check return val of var function.
assert isinstance(node.body[6].body[0].value, ast.Name)
assert node.body[6].body[0].value.id == 'FIVE'
|
"""Test suite for optimizers.constant."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import ast
import pytest
from pycc.asttools import parse
from pycc.optimizers import constant
source = """
ONE = 1
TWO = 2
THREE = ONE + TWO
FOUR = THREE + ONE
FIVE = THREE + TWO
def return_const():
return FOUR
def return_var():
return FIVE
FIVE = FIVE + ONE
FIVE -= ONE
"""
@pytest.fixture
def node():
"""Get as AST node from the source."""
return parse.parse(source)
def test_constant_inliner(node):
"""Test that constant values are inlined."""
constant.optimize(node)
# Check assignment values using constants.
assert node.body[2].value.n == 3
assert node.body[3].value.n == 4
assert node.body[4].value.n == 5
# Check return val of const function.
assert isinstance(node.body[5].body[0].value, ast.Num)
assert node.body[5].body[0].value.n == 4
# Check return val of var function.
assert isinstance(node.body[6].body[0].value, ast.Name)
assert node.body[6].body[0].value.id == 'FIVE'
|
Fix test to use new optimizer interface
|
Fix test to use new optimizer interface
Signed-off-by: Kevin Conway <3473c1f185ca03eadc40ad288d84425b54fd7d57@gmail.com>
|
Python
|
apache-2.0
|
kevinconway/pycc,kevinconway/pycc
|
---
+++
@@ -38,7 +38,7 @@
def test_constant_inliner(node):
"""Test that constant values are inlined."""
- constant.ConstantOptimizer()(node)
+ constant.optimize(node)
# Check assignment values using constants.
assert node.body[2].value.n == 3
|
b2396e90d9da252766979c154e6f98707dda6e0c
|
python/helpers/profiler/_prof_imports.py
|
python/helpers/profiler/_prof_imports.py
|
import sys
IS_PY3K = False
try:
if sys.version_info[0] >= 3:
IS_PY3K = True
except AttributeError:
pass #Not all versions have sys.version_info
if IS_PY3K:
# noinspection PyUnresolvedReferences
from thriftpy3 import TSerialization
# noinspection PyUnresolvedReferences
from thriftpy3.protocol import TJSONProtocol, TBinaryProtocol
# noinspection PyUnresolvedReferences
from profilerpy3.ttypes import ProfilerRequest, ProfilerResponse, Stats, FuncStat, Function
else:
# noinspection PyUnresolvedReferences
from thrift import TSerialization
# noinspection PyUnresolvedReferences
from thrift.protocol import TJSONProtocol, TBinaryProtocol
# noinspection PyUnresolvedReferences
from profiler.ttypes import ProfilerRequest, ProfilerResponse, Stats, FuncStat, Function
|
import sys
IS_PY3K = False
try:
if sys.version_info[0] >= 3:
IS_PY3K = True
except AttributeError:
pass #Not all versions have sys.version_info
if IS_PY3K:
# noinspection PyUnresolvedReferences
from thriftpy3 import TSerialization
# noinspection PyUnresolvedReferences
from thriftpy3.protocol import TBinaryProtocol
# noinspection PyUnresolvedReferences
from profilerpy3.ttypes import ProfilerRequest, ProfilerResponse, Stats, FuncStat, Function
else:
# noinspection PyUnresolvedReferences
from thrift import TSerialization
# noinspection PyUnresolvedReferences
from thrift.protocol import TBinaryProtocol
# noinspection PyUnresolvedReferences
from profiler.ttypes import ProfilerRequest, ProfilerResponse, Stats, FuncStat, Function
|
Remove JSON serialization usages (PY-16388, PY-16389)
|
Remove JSON serialization usages (PY-16388, PY-16389)
|
Python
|
apache-2.0
|
orekyuu/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,da1z/intellij-community,slisson/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,Distrotech/intellij-community,supersven/intellij-community,dslomov/intellij-community,holmes/intellij-community,jagguli/intellij-community,kdwink/intellij-community,izonder/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,supersven/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,ahb0327/intellij-community,samthor/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,semonte/intellij-community,retomerz/intellij-community,ryano144/intellij-community,signed/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,fnouama/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,asedunov/intellij-community,fitermay/intellij-community,dslomov/intellij-community,dslomov/intellij-community,orekyuu/intellij-comm
unity,izonder/intellij-community,ibinti/intellij-community,ibinti/intellij-community,allotria/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,allotria/intellij-community,xfournet/intellij-community,slisson/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,robovm/robovm-studio,signed/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,samthor/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,slisson/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,caot/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,ryano144/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,semonte/intellij-community,fnouama/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,fitermay/intelli
j-community,robovm/robovm-studio,orekyuu/intellij-community,clumsy/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,vladmm/intellij-community,ibinti/intellij-community,ryano144/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,izonder/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,robovm/robovm-studio,mglukhikh/intellij-community,samthor/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,kool79/intellij-community,amith01994/intellij-community,FHannes/intellij-community,adedayo/intellij-community,da1z/intellij-community,tmpgit/intellij-community,kool79/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,signed/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,kool79/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,kool79/intellij-community,Mich
aelNedzelsky/intellij-community,holmes/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,apixandru/intellij-community,asedunov/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,slisson/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,fitermay/intellij-community,xfournet/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,robovm/robovm-studio,xfournet/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,ahb0327/intellij-community,caot/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,apixandru/intellij-community,fnouama/intellij-community,kdwink/intellij-community,caot/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,kdwink/intellij-community,hurricup/intellij-community,da1z/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,ivan-fedorov/int
ellij-community,apixandru/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,apixandru/intellij-community,clumsy/intellij-community,supersven/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,apixandru/intellij-community,caot/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,holmes/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,supersven/intellij-community,dslomov/intellij-community,da1z/intellij-community,blademainer/intellij-community,supersven/intellij-community,ryano144/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,caot/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,hurricup/intellij-community,izonder/intellij-community,blademainer/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,asedunov/intellij-community,da1z/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,holmes/
intellij-community,supersven/intellij-community,blademainer/intellij-community,holmes/intellij-community,adedayo/intellij-community,clumsy/intellij-community,signed/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,amith01994/intellij-community,jagguli/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,fnouama/intellij-community,kool79/intellij-community,semonte/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,caot/intellij-community,da1z/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,semonte/intellij-community,wreckJ/intellij-community,kool79/intellij-community,FHannes/intellij-community,samthor/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,adedayo/intellij-community,hurricup/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,samthor/intellij-community,izonder/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,signed/intellij-community,ryano144/intellij-community,semonte/i
ntellij-community,youdonghai/intellij-community,samthor/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,FHannes/intellij-community,jagguli/intellij-community,slisson/intellij-community,dslomov/intellij-community,caot/intellij-community,kdwink/intellij-community,TangHao1987/intellij-community,signed/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,semonte/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,xfournet/intellij-community,adedayo/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-community,robovm/robovm-studio,fitermay/intellij-community,nicolargo/intellij-community,holmes/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,blademainer/intellij-community,holmes/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,supersven/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,amith01994/intellij-community,FHannes/intellij-community,samthor/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,blademainer/intellij-community,vladmm/intellij-community,lucafavatella/in
tellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,allotria/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,dslomov/intellij-community,FHannes/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,slisson/intellij-community,fnouama/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,allotria/intellij-community,nicolargo/intellij-community,kool79/intellij-community,kdwink/intellij-community,holmes/intellij-community,FHannes/intellij-community,kdwink/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,jagguli/intellij-com
munity,retomerz/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,kdwink/intellij-community,kool79/intellij-community,vvv1559/intellij-community,allotria/intellij-community,ahb0327/intellij-community,kool79/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,adedayo/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,amith01994/intellij-community,fnouama/intellij-community,allotria/intellij-community,semonte/intellij-community,samthor/intellij-community,suncycheng/intellij-community,semonte/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,caot/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,hurricup/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,fnouama/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,holmes/intellij-communit
y,semonte/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,amith01994/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,samthor/intellij-community,asedunov/intellij-community,signed/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community
|
---
+++
@@ -13,14 +13,14 @@
# noinspection PyUnresolvedReferences
from thriftpy3 import TSerialization
# noinspection PyUnresolvedReferences
- from thriftpy3.protocol import TJSONProtocol, TBinaryProtocol
+ from thriftpy3.protocol import TBinaryProtocol
# noinspection PyUnresolvedReferences
from profilerpy3.ttypes import ProfilerRequest, ProfilerResponse, Stats, FuncStat, Function
else:
# noinspection PyUnresolvedReferences
from thrift import TSerialization
# noinspection PyUnresolvedReferences
- from thrift.protocol import TJSONProtocol, TBinaryProtocol
+ from thrift.protocol import TBinaryProtocol
# noinspection PyUnresolvedReferences
from profiler.ttypes import ProfilerRequest, ProfilerResponse, Stats, FuncStat, Function
|
f300f3b31dcdefa91fa8fe46bdaab2d2490ac06a
|
snd/image_board/serializers.py
|
snd/image_board/serializers.py
|
from django.contrib.auth.models import User
from .models import ContentItem, Profile, Comment, Hashtag, ContentHashTag, Like
from rest_framework import serializers
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username', 'email', 'last_name', 'first_name')
class ProfileSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Profile
fields = ('user', 'personal_info', 'job_title', 'department', 'location', 'expertise',
'phone_number', 'contact_skype', 'contact_facebook', 'contact_linkedin', 'user_photo')
class ContentItemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ContentItem
fields = ('id', 'upload_date', 'title', 'description', 'image', 'uploaded_by')
class HashtagSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Hashtag
fields = ('id', 'hashtag_text')
class ContentHashtagSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ContentHashTag
fields = ('id', 'content_id', 'hashtag_id')
class LikeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Like
fields = ('id', 'user_id', 'content_id')
class CommentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Comment
fields = ('id', 'comment_text', 'publication_date', 'author', 'contentItem')
|
from django.contrib.auth.models import User
from .models import ContentItem, Profile, Comment, Hashtag, ContentHashTag, Like
from rest_framework import serializers
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('id', 'url', 'username', 'email', 'last_name', 'first_name')
class ProfileSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Profile
fields = ('id', 'url', 'user', 'personal_info', 'job_title', 'department', 'location', 'expertise',
'phone_number', 'contact_skype', 'contact_facebook', 'contact_linkedin', 'user_photo')
class ContentItemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ContentItem
fields = ('id', 'url', 'upload_date', 'title', 'description', 'image', 'uploaded_by')
class HashtagSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Hashtag
fields = ('id', 'url', 'hashtag_text')
class ContentHashtagSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ContentHashTag
fields = ('id', 'url', 'content_id', 'hashtag_id')
class LikeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Like
fields = ('id', 'url', 'user_id', 'content_id')
class CommentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Comment
fields = ('id', 'url', 'comment_text', 'publication_date', 'author', 'contentItem')
|
Add URLs to each searializer
|
Add URLs to each searializer
|
Python
|
mit
|
SNDjango/server,SNDjango/server,SNDjango/server
|
---
+++
@@ -12,35 +12,35 @@
class ProfileSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Profile
- fields = ('user', 'personal_info', 'job_title', 'department', 'location', 'expertise',
+ fields = ('id', 'url', 'user', 'personal_info', 'job_title', 'department', 'location', 'expertise',
'phone_number', 'contact_skype', 'contact_facebook', 'contact_linkedin', 'user_photo')
class ContentItemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ContentItem
- fields = ('id', 'upload_date', 'title', 'description', 'image', 'uploaded_by')
+ fields = ('id', 'url', 'upload_date', 'title', 'description', 'image', 'uploaded_by')
class HashtagSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Hashtag
- fields = ('id', 'hashtag_text')
+ fields = ('id', 'url', 'hashtag_text')
class ContentHashtagSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ContentHashTag
- fields = ('id', 'content_id', 'hashtag_id')
+ fields = ('id', 'url', 'content_id', 'hashtag_id')
class LikeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Like
- fields = ('id', 'user_id', 'content_id')
+ fields = ('id', 'url', 'user_id', 'content_id')
class CommentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Comment
- fields = ('id', 'comment_text', 'publication_date', 'author', 'contentItem')
+ fields = ('id', 'url', 'comment_text', 'publication_date', 'author', 'contentItem')
|
f68daf88cd7fb6cad64a72ef48af5b9b616ca4c6
|
StudentsListHandler.py
|
StudentsListHandler.py
|
__author__ = 'Mael Beuget, Pierre Monnin & Thibaut Smith'
from BaseHandler import *
import logging
from XMLAnalyser import XMLAnalyser
from google.appengine.api import memcache
class StudentsListHandler(BaseHandler):
def __init__(self, request=None, response=None):
self.initialize(request, response)
self.pageName = "students_list"
def get(self):
self.render("groupchoice.html")
def post(self):
group_to_find = self.request.get("group_name")
groups = memcache.get("group_list")
if groups is None:
logging.error("CACHE MISS StudentsListHandler l. 24")
parser = XMLAnalyser()
groups = parser.get_members()
memcache.set("group_list", groups, time=7200);
to_display = dict()
for key in groups:
if group_to_find in key:
to_display[key] = groups[key]
if len(to_display) > 0:
self.render("groupdisplay.html", group_name=group_to_find, groups=to_display)
else:
self.render("message.html", title="No such group", subtitle="", argument=group_to_find)
|
__author__ = 'Mael Beuget, Pierre Monnin & Thibaut Smith'
from BaseHandler import *
import logging
from XMLAnalyser import XMLAnalyser
from google.appengine.api import memcache
class StudentsListHandler(BaseHandler):
def __init__(self, request=None, response=None):
self.initialize(request, response)
self.pageName = "students_list"
def get(self):
self.render("groupchoice.html")
def post(self):
group_to_find = self.request.get("group_name")
groups = memcache.get("group_list")
if groups is None:
logging.error("CACHE MISS StudentsListHandler l. 24")
parser = XMLAnalyser()
groups = parser.get_members()
memcache.set("group_list", groups, time=604800);
to_display = dict()
for key in groups:
if group_to_find in key:
to_display[key] = groups[key]
if len(to_display) > 0:
self.render("groupdisplay.html", group_name=group_to_find, groups=to_display)
else:
self.render("message.html", title="No such group", subtitle="", argument=group_to_find)
|
Change memcache expiration timing to 1 week
|
Change memcache expiration timing to 1 week
|
Python
|
mit
|
Videl/absentees-blackboard,Videl/absentees-blackboard
|
---
+++
@@ -21,7 +21,7 @@
logging.error("CACHE MISS StudentsListHandler l. 24")
parser = XMLAnalyser()
groups = parser.get_members()
- memcache.set("group_list", groups, time=7200);
+ memcache.set("group_list", groups, time=604800);
to_display = dict()
for key in groups:
|
85db49b33f793a1ed5c66684996862fa8f1614b1
|
web/geosearch/tests/test_bag_dataset.py
|
web/geosearch/tests/test_bag_dataset.py
|
import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 7)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 6)
if __name__ == '__main__':
unittest.main()
|
import unittest
from datapunt_geosearch import config
from datapunt_geosearch import datasource
class TestBAGDataset(unittest.TestCase):
def test_query(self):
x = 120993
y = 485919
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
self.assertEqual(len(results['features']), 6)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
x = 52.36011
y = 4.88798
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y, rd=False)
self.assertEqual(len(results['features']), 6)
if __name__ == '__main__':
unittest.main()
|
Remove WKPB from geosearch - also change other test
|
Remove WKPB from geosearch - also change other test
|
Python
|
mpl-2.0
|
DatapuntAmsterdam/datapunt_geosearch,DatapuntAmsterdam/datapunt_geosearch
|
---
+++
@@ -12,7 +12,7 @@
ds = datasource.BagDataSource(dsn=config.DSN_BAG)
results = ds.query(x, y)
- self.assertEqual(len(results['features']), 7)
+ self.assertEqual(len(results['features']), 6)
self.assertIn('distance', results['features'][0]['properties'])
def test_query_wgs84(self):
|
0df8a958b479e01d9c931bd4ca185c68720e14e6
|
analyser/api.py
|
analyser/api.py
|
import os
import json
import requests
import rethinkdb as r
from flask import Blueprint, current_app
from utils.decorators import validate, require
from utils.validators import validate_url
from krunchr.vendors.rethinkdb import db
from .parser import Parser
from .tasks import get_file
endpoint = Blueprint('analyse_url', __name__)
@endpoint.route('analyse/', methods=['POST'])
@require('url')
@validate({
'url': validate_url
})
def analyse_url(url):
name, ext = os.path.splitext(url)
parse = Parser(ext=ext[1:])
response = requests.get(url, stream=True)
fields = []
for chunk in response.iter_lines(1024):
fields = parse(chunk)
if fields:
break
task_id = get_file.delay(url, current_app.config['DISCO_FILES']).task_id
r.table('jobs').insert({
'url': url,
'task_id': task_id,
'state': 'starting'
}).run(db.conn)
return json.dumps(fields)
|
import os
import json
import requests
import rethinkdb as r
from flask import Blueprint, current_app
from utils.decorators import validate, require
from utils.validators import validate_url
from krunchr.vendors.rethinkdb import db
from .parser import Parser
from .tasks import get_file, push_data
endpoint = Blueprint('analyse_url', __name__)
@endpoint.route('analyse/', methods=['POST'])
@require('url')
@validate({
'url': validate_url
})
def analyse_url(url):
name, ext = os.path.splitext(url)
parse = Parser(ext=ext[1:])
response = requests.get(url, stream=True)
fields = []
for chunk in response.iter_lines(1024):
fields = parse(chunk)
if fields:
break
task_id = (get_file.s(url, current_app.config['DISCO_FILES']) |
push_data.s()).apply_async().task_id
r.table('jobs').insert({
'url': url,
'task_id': task_id,
'state': 'starting'
}).run(db.conn)
return json.dumps(fields)
|
Use a chord in order to start tasks
|
Use a chord in order to start tasks
|
Python
|
apache-2.0
|
vtemian/kruncher
|
---
+++
@@ -12,7 +12,7 @@
from krunchr.vendors.rethinkdb import db
from .parser import Parser
-from .tasks import get_file
+from .tasks import get_file, push_data
endpoint = Blueprint('analyse_url', __name__)
@@ -33,7 +33,8 @@
if fields:
break
- task_id = get_file.delay(url, current_app.config['DISCO_FILES']).task_id
+ task_id = (get_file.s(url, current_app.config['DISCO_FILES']) |
+ push_data.s()).apply_async().task_id
r.table('jobs').insert({
'url': url,
'task_id': task_id,
|
1fd2aef4fcaabddcb533ffe2f999e55d1e3ce7fe
|
docs/source/conf.py
|
docs/source/conf.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import subprocess
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:
subprocess.call('cd ..; doxygen', shell=True)
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_stylesheet("main_stylesheet.css")
extensions = ['breathe']
breathe_projects = { 'xtl': '../xml' }
templates_path = ['_templates']
html_static_path = ['_static']
source_suffix = '.rst'
master_doc = 'index'
project = 'xtl'
copyright = '2017, Johan Mabille and Sylvain Corlay'
author = 'Johan Mabille and Sylvain Corlay'
html_logo = 'quantstack-white.svg'
exclude_patterns = []
highlight_language = 'c++'
pygments_style = 'sphinx'
todo_include_todos = False
htmlhelp_basename = 'xtldoc'
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import subprocess
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:
subprocess.call('cd ..; doxygen', shell=True)
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_css_file("main_stylesheet.css")
extensions = ['breathe']
breathe_projects = { 'xtl': '../xml' }
templates_path = ['_templates']
html_static_path = ['_static']
source_suffix = '.rst'
master_doc = 'index'
project = 'xtl'
copyright = '2017, Johan Mabille and Sylvain Corlay'
author = 'Johan Mabille and Sylvain Corlay'
html_logo = 'quantstack-white.svg'
exclude_patterns = []
highlight_language = 'c++'
pygments_style = 'sphinx'
todo_include_todos = False
htmlhelp_basename = 'xtldoc'
|
Fix build with Sphinx 4.
|
Fix build with Sphinx 4.
`add_stylesheet` was deprecated in 1.8 and removed in 4.0 [1]. The
replacement, `add_css_file` was added in 1.0, which is older than any
version required by `breathe`.
[1] https://www.sphinx-doc.org/en/master/extdev/deprecated.html?highlight=add_stylesheet
|
Python
|
bsd-3-clause
|
xtensor-stack/xtl,xtensor-stack/xtl
|
---
+++
@@ -15,8 +15,10 @@
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+
def setup(app):
- app.add_stylesheet("main_stylesheet.css")
+ app.add_css_file("main_stylesheet.css")
+
extensions = ['breathe']
breathe_projects = { 'xtl': '../xml' }
@@ -35,4 +37,3 @@
pygments_style = 'sphinx'
todo_include_todos = False
htmlhelp_basename = 'xtldoc'
-
|
8bcc4fe29468868190dcfcbea5438dc0aa638387
|
sweetercat/test_utils.py
|
sweetercat/test_utils.py
|
from __future__ import division
from utils import absolute_magnitude, plDensity, hz
def test_absolute_magnitude():
m = 10
assert isinstance(absolute_magnitude(1, 1), float)
assert absolute_magnitude(1, m) > m
assert absolute_magnitude(1, m) == 15
assert absolute_magnitude(0.1, m) == m
assert absolute_magnitude(0.01, m) < m
assert absolute_magnitude(1/10, m) == m
def test_plDensity():
m, r = 1, 1
assert isinstance(plDensity(m, r), float)
assert round(plDensity(m, r), 2) == 1.33
assert plDensity(0, r) == 0
def test_hz():
teff = 5777
lum = 1
for model in range(1, 6):
assert isinstance(hz(teff, lum, model), float)
results = [0.75, 0.98, 0.99, 1.71, 1.77]
for model, result in enumerate(results, start=1):
assert round(hz(teff, lum, model), 2) == result
|
from __future__ import division
import pytest
import pandas as pd
from utils import absolute_magnitude, plDensity, hz, readSC
def test_absolute_magnitude():
m = 10
assert isinstance(absolute_magnitude(1, 1), float)
assert absolute_magnitude(1, m) > m
assert absolute_magnitude(1, m) == 15
assert absolute_magnitude(0.1, m) == m
assert absolute_magnitude(0.01, m) < m
assert absolute_magnitude(1/10, m) == m
with pytest.raises(ZeroDivisionError):
absolute_magnitude(0, m)
def test_plDensity():
m, r = 1, 1
assert isinstance(plDensity(m, r), float)
assert round(plDensity(m, r), 2) == 1.33
assert plDensity(0, r) == 0
def test_hz():
teff = 5777
lum = 1
for model in range(1, 6):
assert isinstance(hz(teff, lum, model), float)
results = [0.75, 0.98, 0.99, 1.71, 1.77]
for model, result in enumerate(results, start=1):
assert round(hz(teff, lum, model), 2) == result
assert hz(teff, lum, 2) < hz(teff, lum, 4) # hz1 < hz2
def test_readSC():
df, plot_names = readSC()
assert isinstance(df, pd.DataFrame) #
assert isinstance(plot_names, list)
for name in plot_names:
assert isinstance(name, str)
|
Add couple more utils tests.
|
Add couple more utils tests.
|
Python
|
mit
|
DanielAndreasen/SWEETer-Cat,DanielAndreasen/SWEETer-Cat
|
---
+++
@@ -1,5 +1,7 @@
from __future__ import division
-from utils import absolute_magnitude, plDensity, hz
+import pytest
+import pandas as pd
+from utils import absolute_magnitude, plDensity, hz, readSC
def test_absolute_magnitude():
@@ -10,6 +12,8 @@
assert absolute_magnitude(0.1, m) == m
assert absolute_magnitude(0.01, m) < m
assert absolute_magnitude(1/10, m) == m
+ with pytest.raises(ZeroDivisionError):
+ absolute_magnitude(0, m)
def test_plDensity():
@@ -27,3 +31,12 @@
results = [0.75, 0.98, 0.99, 1.71, 1.77]
for model, result in enumerate(results, start=1):
assert round(hz(teff, lum, model), 2) == result
+ assert hz(teff, lum, 2) < hz(teff, lum, 4) # hz1 < hz2
+
+
+def test_readSC():
+ df, plot_names = readSC()
+ assert isinstance(df, pd.DataFrame) #
+ assert isinstance(plot_names, list)
+ for name in plot_names:
+ assert isinstance(name, str)
|
310cab802d7040dfb914ed60529d38011aa83ae8
|
app/views.py
|
app/views.py
|
from flask import render_template, request, Blueprint
import json
from app.state import state
PROGRAMS_LIST = [
["ascii_text", "ASCII Text"],
["cheertree", "Cheertree"],
["cross", "Cross"],
["demo", "Demo"],
["dna", "DNA"],
["game_of_life", "Game of Life"],
["matrix", "Matrix"],
["psychedelia", "Psychedelia"],
["rain", "Rain"],
["rainbow", "Rainbow"],
["random_blinky", "Random Blinky"],
["random_sparkles", "Random Sparkles"],
["simple", "Simple"],
["snow", "Snow"],
["tri", "Trig"],
]
index = Blueprint('index', __name__, template_folder='templates')
@index.route('/', methods=['GET'])
def show():
if request.method == 'GET':
return render_template('index.html', programs_list=PROGRAMS_LIST)
|
from flask import render_template, request, Blueprint
import json
from app.state import state
PROGRAMS_LIST = [
["ascii_text", "ASCII Text"],
["cheertree", "Cheertree"],
["cross", "Cross"],
["demo", "Demo"],
["dna", "DNA"],
["game_of_life", "Game of Life"],
["matrix", "Matrix"],
["psychedelia", "Psychedelia"],
["rain", "Rain"],
["rainbow", "Rainbow"],
["random_blinky", "Random Blinky"],
["random_sparkles", "Random Sparkles"],
["simple", "Simple"],
["snow", "Snow"],
["trig", "Trig"],
]
index = Blueprint('index', __name__, template_folder='templates')
@index.route('/', methods=['GET'])
def show():
if request.method == 'GET':
return render_template('index.html', programs_list=PROGRAMS_LIST)
|
Fix typo broke trig program
|
Fix typo broke trig program
|
Python
|
mit
|
njbbaer/unicorn-remote,njbbaer/unicorn-remote,njbbaer/unicorn-remote
|
---
+++
@@ -19,7 +19,7 @@
["random_sparkles", "Random Sparkles"],
["simple", "Simple"],
["snow", "Snow"],
- ["tri", "Trig"],
+ ["trig", "Trig"],
]
|
bc74780d42dd63a1dee005fe78db45cc994392d2
|
twisted/plugins/proxy.py
|
twisted/plugins/proxy.py
|
from zope.interface import implements
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from oauth_proxy import oauth_proxy
class OAuthProxyServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "oauth_proxy"
description = "OAuth HTTP proxy"
options = oauth_proxy.Options
def makeService(self, options):
# TODO add error handling for missing params
useSSL = options["ssl"]
consumerKey = options["consumer-key"]
consumerSecret = options["consumer-secret"]
if options.has_key("token") and options.has_key("token-secret"):
token = options["token"]
tokenSecret = options["token-secret"]
else:
token = tokenSecret = None
port = options["port"]
credentials = oauth_proxy.OAuthCredentials(consumerKey, consumerSecret, token, tokenSecret)
credentialProvider = oauth_proxy.StaticOAuthCredentialProvider(credentials)
return internet.TCPServer(port, oauth_proxy.OAuthProxyFactory(credentialProvider, useSSL))
serviceMaker = OAuthProxyServiceMaker()
|
from zope.interface import implements
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from oauth_proxy import oauth_proxy
class OAuthProxyServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "oauth_proxy"
description = "OAuth HTTP proxy"
options = oauth_proxy.Options
def makeService(self, options):
# TODO add error handling for missing params
useSSL = options["ssl"]
consumerKey = options["consumer-key"]
consumerSecret = options["consumer-secret"]
if options.has_key("token") and options.has_key("token-secret"):
token = options["token"]
tokenSecret = options["token-secret"]
else:
token = tokenSecret = None
port = int(options["port"])
credentials = oauth_proxy.OAuthCredentials(consumerKey, consumerSecret, token, tokenSecret)
credentialProvider = oauth_proxy.StaticOAuthCredentialProvider(credentials)
return internet.TCPServer(port, oauth_proxy.OAuthProxyFactory(credentialProvider, useSSL))
serviceMaker = OAuthProxyServiceMaker()
|
Allow specifying a port to actually work
|
Allow specifying a port to actually work
|
Python
|
bsd-3-clause
|
mojodna/oauth-proxy
|
---
+++
@@ -25,7 +25,7 @@
else:
token = tokenSecret = None
- port = options["port"]
+ port = int(options["port"])
credentials = oauth_proxy.OAuthCredentials(consumerKey, consumerSecret, token, tokenSecret)
credentialProvider = oauth_proxy.StaticOAuthCredentialProvider(credentials)
|
11df95a61f93a8654817f9837226a33c98f34af8
|
arguments.py
|
arguments.py
|
import argparse
"""
usage: mfh.py [-h] [-c | --client [PORT]] [-u] [-v]
Serve some sweet honey to the ubiquitous bots!
optional arguments:
-h, --help show this help message and exit
-c launch client with on port defined in settings
--client [PORT] port to start a client on
-u, --updater enable self updating
-v, --verbose increase output verbosity
"""
def parse():
parser = argparse.ArgumentParser(
description='Serve some sweet honey to the ubiquitous bots!',
epilog='And that`s how you`d detect a sneaky chinese bot.',
prog='mfh.py',
)
client_group = parser.add_mutually_exclusive_group()
client_group.add_argument(
'-c',
action='store_true',
help='launch client with on port defined in settings',
)
client_group.add_argument(
'--client',
help='port to start a client on',
metavar='PORT',
nargs='?',
type=int,
)
parser.add_argument(
'-u',
'--updater',
action='store_true',
help='enable self updating',
)
parser.add_argument(
'-v',
'--verbose',
action='store_true',
help='increase output verbosity',
)
return parser.parse_args()
|
import argparse
"""
usage: mfh.py [-h] [-c | --client [PORT]] [-u] [-v]
Serve some sweet honey to the ubiquitous bots!
optional arguments:
-h, --help show this help message and exit
-c launch client with on port defined in settings
--client [PORT] port to start a client on
-u, --updater enable self updating
-v, --verbose increase output verbosity
"""
def parse():
parser = argparse.ArgumentParser(
description='Serve some sweet honey to the ubiquitous bots!',
epilog='And that`s how you`d detect a sneaky chinese bot.',
prog='mfh.py',
)
client_group = parser.add_mutually_exclusive_group()
client_group.add_argument(
'-c',
action='store_true',
help='launch client with on port defined in settings',
)
client_group.add_argument(
'--client',
help='port to start a client on',
metavar='PORT',
nargs='?',
type=int,
)
server_group = parser.add_mutually_exclusive_group()
server_group.add_argument(
'-s',
action='store_true',
help='launch server with on port defined in settings',
)
server_group.add_argument(
'--server',
help='port to start a server on',
metavar='PORT',
nargs='?',
type=int,
)
parser.add_argument(
'-u',
'--updater',
action='store_true',
help='enable self updating',
)
parser.add_argument(
'-v',
'--verbose',
action='store_true',
help='increase output verbosity',
)
return parser.parse_args()
|
Add option for launching server
|
Add option for launching server
There was no option to start the server and you had to do it manually.
Now it can be started with:
1) -s with default configuration
2) --server <PORT> for manual port choice
|
Python
|
mit
|
Zloool/manyfaced-honeypot
|
---
+++
@@ -37,6 +37,22 @@
type=int,
)
+ server_group = parser.add_mutually_exclusive_group()
+
+ server_group.add_argument(
+ '-s',
+ action='store_true',
+ help='launch server with on port defined in settings',
+ )
+
+ server_group.add_argument(
+ '--server',
+ help='port to start a server on',
+ metavar='PORT',
+ nargs='?',
+ type=int,
+ )
+
parser.add_argument(
'-u',
'--updater',
|
8cef502afb45638d74306b2fcebec37f445b13c6
|
Recorders.py
|
Recorders.py
|
from Measurement import Measurement
class Recorder(object):
def __init__(self, recorderType):
self.recorderType = recorderType
def record(self, measure: Measurement):
None
class PrintRecorder(Recorder):
def __init__(self, config):
Recorder.__init__(self, 'file')
self.format = config['format']
def record(self, measure: Measurement):
line = self.format.format(
device_id=measure.device_id,
celsius=measure.get_celsius(),
fahrenheit=measure.get_fahrenheit(),
timestamp=measure.timestamp)
print(line, end='\n')
class FileRecorder(Recorder):
def __init__(self, config):
Recorder.__init__(self, 'file')
self.format = config['format']
self.container = config['container']
self.extension = config['extension']
def record(self, measure: Measurement):
log_entry = self.format.format(
device_id=measure.device_id,
celsius=measure.get_celsius(),
fahrenheit=measure.get_fahrenheit(),
timestamp=measure.timestamp)
file_path = self.container + measure.device_id.split('/')[-1] + '/' + self.extension
f = open(file_path, 'w')
f.writelines([log_entry])
|
from Measurement import Measurement


class Recorder(object):
    """Base class for measurement recorders.

    Subclasses implement record() to persist a single Measurement.
    """

    def __init__(self, recorderType):
        # Free-form tag describing the concrete recorder kind.
        self.recorderType = recorderType

    def record(self, measure: Measurement):
        """Persist one measurement; no-op in the base class."""
        pass


class PrintRecorder(Recorder):
    """Recorder that writes a formatted measurement line to stdout."""

    def __init__(self, config):
        # NOTE(review): passes 'file' as the recorder type even though this
        # recorder prints — looks like a copy/paste slip; confirm before
        # changing, since recorderType may be read elsewhere.
        Recorder.__init__(self, 'file')
        self.format = config['format']

    def record(self, measure: Measurement):
        line = self.format.format(
            device_id=measure.device_id,
            celsius=measure.get_celsius(),
            fahrenheit=measure.get_fahrenheit(),
            timestamp=measure.timestamp)
        print(line, end='\n')


class FileRecorder(Recorder):
    """Recorder that writes each measurement to a per-device file."""

    def __init__(self, config):
        Recorder.__init__(self, 'file')
        self.format = config['format']
        self.container = config['container']   # base path prefix for output
        self.extension = config['extension']   # appended file suffix

    def record(self, measure: Measurement):
        log_entry = self.format.format(
            device_id=measure.device_id,
            celsius=measure.get_celsius(),
            fahrenheit=measure.get_fahrenheit(),
            timestamp=measure.timestamp)
        file_path = self.container + measure.device_id.split('/')[-1] + self.extension
        # Use a context manager so the handle is closed (and data flushed)
        # even on error; the original leaked the open file object.
        # NOTE(review): mode 'w' overwrites the file on every call —
        # confirm whether append ('a') was intended.
        with open(file_path, 'w') as log_file:
            log_file.writelines([log_entry])
|
Remove last slash from file path
|
Remove last slash from file path
|
Python
|
mit
|
hectortosa/py-temperature-recorder
|
---
+++
@@ -36,8 +36,8 @@
celsius=measure.get_celsius(),
fahrenheit=measure.get_fahrenheit(),
timestamp=measure.timestamp)
-
- file_path = self.container + measure.device_id.split('/')[-1] + '/' + self.extension
+
+ file_path = self.container + measure.device_id.split('/')[-1] + self.extension
f = open(file_path, 'w')
f.writelines([log_entry])
|
d05fdd1ed6657894ecc624777762b463a3ea69da
|
tests/basics/fun_name.py
|
tests/basics/fun_name.py
|
def Fun():
    pass


class A:
    def __init__(self):
        pass

    def Fun(self):
        pass


try:
    # Print the __name__ of a plain function, an unbound method and a
    # bound method; implementations lacking __name__ raise AttributeError.
    for func in (Fun, A.__init__, A.Fun, A().Fun):
        print(func.__name__)
except AttributeError:
    print('SKIP')
    raise SystemExit

# __name__ of a bound native method is not implemented in uPy;
# just make sure that accessing it does not crash.
try:
    str((1).to_bytes.__name__)
except AttributeError:
    pass
|
def Fun():
    pass


class A:
    def __init__(self):
        pass

    def Fun(self):
        pass


try:
    # Print the __name__ of a plain function, an unbound method and a
    # bound method; implementations lacking __name__ raise AttributeError.
    for func in (Fun, A.__init__, A.Fun, A().Fun):
        print(func.__name__)
except AttributeError:
    print('SKIP')
    raise SystemExit

# __name__ of a bound native method is not implemented in uPy;
# just make sure that accessing it does not crash.
try:
    str((1).to_bytes.__name__)
except AttributeError:
    pass


# name of a function that has closed over variables
def outer():
    captured = 1

    def inner():
        return captured
    return inner


print(outer.__name__)
|
Add test for getting name of func with closed over locals.
|
tests/basics: Add test for getting name of func with closed over locals.
Tests correct decoding of the prelude to get the function name.
|
Python
|
mit
|
pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython
|
---
+++
@@ -22,3 +22,11 @@
str((1).to_bytes.__name__)
except AttributeError:
pass
+
+# name of a function that has closed over variables
+def outer():
+ x = 1
+ def inner():
+ return x
+ return inner
+print(outer.__name__)
|
91fc886bf302f9850977c8d88abba3bffd51928b
|
tests/test_compliance.py
|
tests/test_compliance.py
|
#!/usr/bin/env python
import os.path
import nose.tools as nose
import pep8
def test_pep8():
'''all Python files should comply with PEP 8'''
for subdir_path, subdir_names, file_names in os.walk('.'):
if '.git' in subdir_names:
subdir_names.remove('.git')
for file_name in file_names:
file_path = os.path.join(subdir_path, file_name)
file_ext = os.path.splitext(file_name)[1]
if file_ext == '.py':
style_guide = pep8.StyleGuide(quiet=True)
total_errors = style_guide.input_file(file_path)
msg = '{} does not comply with PEP 8'.format(file_path)
yield nose.assert_equal, total_errors, 0, msg
|
#!/usr/bin/env python

import os.path
import nose.tools as nose
import pep8
import radon.complexity as radon


def _iter_python_files():
    """Yield the path of every .py file under the CWD, skipping .git.

    Leading underscore keeps nose from collecting this helper as a test.
    Previously this walk/filter logic was duplicated in both generators.
    """
    for subdir_path, subdir_names, file_names in os.walk('.'):
        if '.git' in subdir_names:
            # Pruning in place stops os.walk from descending into .git.
            subdir_names.remove('.git')
        for file_name in file_names:
            if os.path.splitext(file_name)[1] == '.py':
                yield os.path.join(subdir_path, file_name)


def test_pep8():
    '''all Python files should comply with PEP 8'''
    for file_path in _iter_python_files():
        style_guide = pep8.StyleGuide(quiet=True)
        total_errors = style_guide.input_file(file_path)
        msg = '{} does not comply with PEP 8'.format(file_path)
        yield nose.assert_equal, total_errors, 0, msg


def test_complexity():
    '''all Python functions should have a low cyclomatic complexity score'''
    for file_path in _iter_python_files():
        with open(file_path, 'r') as file:
            blocks = radon.cc_visit(file.read())
        for block in blocks:
            complexity = block.complexity
            # Rewrite the docstring per block so nose reports a
            # descriptive name for each generated test case.
            test_doc = '{} ({}) should have a low complexity score'
            test_complexity.__doc__ = test_doc.format(
                block.name, file_path)
            fail_msg = '{} ({}) has a complexity of {}'.format(
                block.name, file_path, complexity)
            yield nose.assert_less_equal, complexity, 10, fail_msg
|
Add test generator for function complexity
|
Add test generator for function complexity
|
Python
|
mit
|
caleb531/ssh-wp-backup,caleb531/ssh-wp-backup
|
---
+++
@@ -3,6 +3,7 @@
import os.path
import nose.tools as nose
import pep8
+import radon.complexity as radon
def test_pep8():
@@ -18,3 +19,24 @@
total_errors = style_guide.input_file(file_path)
msg = '{} does not comply with PEP 8'.format(file_path)
yield nose.assert_equal, total_errors, 0, msg
+
+
+def test_complexity():
+ '''all Python functions should have a low cyclomatic complexity score'''
+ for subdir_path, subdir_names, file_names in os.walk('.'):
+ if '.git' in subdir_names:
+ subdir_names.remove('.git')
+ for file_name in file_names:
+ file_path = os.path.join(subdir_path, file_name)
+ file_ext = os.path.splitext(file_name)[1]
+ if file_ext == '.py':
+ with open(file_path, 'r') as file:
+ blocks = radon.cc_visit(file.read())
+ for block in blocks:
+ complexity = block.complexity
+ test_doc = '{} ({}) should have a low complexity score'
+ test_complexity.__doc__ = test_doc.format(
+ block.name, file_path)
+ fail_msg = '{} ({}) has a complexity of {}'.format(
+ block.name, file_path, complexity)
+ yield nose.assert_less_equal, complexity, 10, fail_msg
|
315ad5f2f31f82f8d42d2a65fe4f056b4e3fcfd7
|
tests/test_quickstart.py
|
tests/test_quickstart.py
|
import pytest
from lektor.quickstart import get_default_author
from lektor.quickstart import get_default_author_email
from lektor.utils import locate_executable
def test_default_author(os_user):
assert get_default_author() == "Lektor Test"
@pytest.mark.skipif(locate_executable("git") is None, reason="git not installed")
def test_default_author_email():
assert isinstance(get_default_author_email(), str)
|
import os
import pytest
from lektor.quickstart import get_default_author
from lektor.quickstart import get_default_author_email
from lektor.utils import locate_executable
def test_default_author(os_user):
    # os_user fixture (defined elsewhere) presumably pins the current
    # user's full name to "Lektor Test" — confirm in conftest.
    assert get_default_author() == "Lektor Test"
@pytest.mark.skipif(locate_executable("git") is None, reason="git not installed")
def test_default_author_email():
    # With git available, the email comes from git config and is a str.
    assert isinstance(get_default_author_email(), str)
def test_default_author_email_git_unavailable(monkeypatch):
    # Point PATH at a location with no executables so git cannot be
    # found, then drop the cached lookup so the patched PATH is used.
    monkeypatch.setitem(os.environ, "PATH", "/dev/null")
    locate_executable.cache_clear()
    assert get_default_author_email() is None
|
Add test case for when git is not available
|
Add test case for when git is not available
|
Python
|
bsd-3-clause
|
lektor/lektor,lektor/lektor,lektor/lektor,lektor/lektor
|
---
+++
@@ -1,3 +1,5 @@
+import os
+
import pytest
from lektor.quickstart import get_default_author
@@ -12,3 +14,9 @@
@pytest.mark.skipif(locate_executable("git") is None, reason="git not installed")
def test_default_author_email():
assert isinstance(get_default_author_email(), str)
+
+
+def test_default_author_email_git_unavailable(monkeypatch):
+ monkeypatch.setitem(os.environ, "PATH", "/dev/null")
+ locate_executable.cache_clear()
+ assert get_default_author_email() is None
|
f0c0f8816f93ec56cf16db3f5c2298a95e7e9181
|
server/butler/server.py
|
server/butler/server.py
|
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
import gevent
import gevent.wsgi
import simplejson as json
from butler import service
from butler.options import Options
from butler.routing import Dispatcher
default_config_path = \
os.path.expanduser(os.path.join('~', '.config', 'butler', 'butler.cfg'))
def load_config(path):
try:
with open(path, 'r') as config_file:
return Options(json.load(config_file))
except (IOError, TypeError, ValueError) as e:
print(e, file=sys.stderr)
def serve(config_path):
options = load_config(config_path)
services = list(service.find_all('butler.services'))
services.append(service.static('config', options))
delegates = service.start(services)
address = options.options('server').str('address', '127.0.0.1:8000')
server = gevent.wsgi.WSGIServer(address, Dispatcher(delegates))
server.serve_forever()
def main():
config_path = default_config_path
if len(sys.argv) > 1:
config_path = sys.argv[1]
serve(config_path)
|
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
import gevent
import gevent.wsgi
import simplejson as json
from butler import service
from butler.options import Options
from butler.routing import Dispatcher
default_config_path = \
os.path.expanduser(os.path.join('~', '.config', 'butler', 'butler.cfg'))
def load_config(path):
try:
with open(path, 'r') as config_file:
return Options(json.load(config_file))
except (IOError, TypeError, ValueError) as e:
print(e, file=sys.stderr)
def serve(config_path):
options = load_config(config_path)
services = list(service.find_all('butler.services'))
services.append(service.static('options', options))
delegates = service.start(services)
address = options.options('server').str('address', '127.0.0.1:8000')
server = gevent.wsgi.WSGIServer(address, Dispatcher(delegates))
server.serve_forever()
def main():
config_path = default_config_path
if len(sys.argv) > 1:
config_path = sys.argv[1]
serve(config_path)
|
Rename 'config' service to 'options'
|
Rename 'config' service to 'options'
|
Python
|
mit
|
knrafto/butler,knrafto/butler,knrafto/butler
|
---
+++
@@ -25,7 +25,7 @@
def serve(config_path):
options = load_config(config_path)
services = list(service.find_all('butler.services'))
- services.append(service.static('config', options))
+ services.append(service.static('options', options))
delegates = service.start(services)
address = options.options('server').str('address', '127.0.0.1:8000')
server = gevent.wsgi.WSGIServer(address, Dispatcher(delegates))
|
ce2f07e7fa5ac38235cbb6ea6c4fee3a60031246
|
social_core/tests/backends/test_udata.py
|
social_core/tests/backends/test_udata.py
|
import json
from six.moves.urllib_parse import urlencode
from .oauth import OAuth2Test
class DatagouvfrOAuth2Test(OAuth2Test):
backend_path = 'social_core.backends.udata.DatagouvfrOAuth2'
user_data_url = 'https://www.data.gouv.fr/api/1/me/'
expected_username = 'foobar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
request_token_body = urlencode({
'oauth_token_secret': 'foobar-secret',
'oauth_token': 'foobar',
'oauth_callback_confirmed': 'true'
})
user_data_body = json.dumps({})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
|
import json
from six.moves.urllib_parse import urlencode
from .oauth import OAuth2Test
# Exercises the data.gouv.fr OAuth2 backend against canned HTTP
# responses supplied by the OAuth2Test harness (no network involved).
class DatagouvfrOAuth2Test(OAuth2Test):
    backend_path = 'social_core.backends.udata.DatagouvfrOAuth2'
    user_data_url = 'https://www.data.gouv.fr/api/1/me/'
    expected_username = 'foobar'
    # Canned token-endpoint response. first_name/email live in the token
    # payload — presumably the backend reads user details from here
    # rather than from the (empty) user-data body below; confirm against
    # the backend implementation.
    access_token_body = json.dumps({
        'access_token': 'foobar',
        'token_type': 'bearer',
        'first_name': 'foobar',
        'email': 'foobar@example.com'
    })
    request_token_body = urlencode({
        'oauth_token_secret': 'foobar-secret',
        'oauth_token': 'foobar',
        'oauth_callback_confirmed': 'true'
    })
    # The /me/ endpoint body is intentionally empty for this test.
    user_data_body = json.dumps({})
    def test_login(self):
        self.do_login()
    def test_partial_pipeline(self):
        self.do_partial_pipeline()
|
Fix tests for udata/datagouvfr backend
|
Fix tests for udata/datagouvfr backend
|
Python
|
bsd-3-clause
|
python-social-auth/social-core,python-social-auth/social-core
|
---
+++
@@ -11,7 +11,9 @@
expected_username = 'foobar'
access_token_body = json.dumps({
'access_token': 'foobar',
- 'token_type': 'bearer'
+ 'token_type': 'bearer',
+ 'first_name': 'foobar',
+ 'email': 'foobar@example.com'
})
request_token_body = urlencode({
'oauth_token_secret': 'foobar-secret',
|
61be68b330c6e37a4f53b2441370c96c9aa13777
|
PRESUBMIT.py
|
PRESUBMIT.py
|
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for catapult.
See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def GetPreferredTryMasters(project, change): # pylint: disable=unused-argument
return {
'tryserver.client.catapult': {
'Catapult Linux Tryserver': {'defaulttests'},
'Catapult Mac Tryserver': {'defaulttests'},
'Catapult Windows Tryserver': {'defaulttests'},
}
}
def _CommonChecks(input_api, output_api):
results = []
results.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Top-level presubmit script for catapult.

See https://www.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""


def GetPreferredTryMasters(project, change):  # pylint: disable=unused-argument
    """Map this project to the try bots that run the default test suites."""
    trybots = {
        'Catapult Linux Tryserver': {'defaulttests'},
        'Catapult Mac Tryserver': {'defaulttests'},
        'Catapult Windows Tryserver': {'defaulttests'},
    }
    return {'tryserver.client.catapult': trybots}


def _RunPylint(input_api, output_api):
    """Run the canned pylint checks over the affected files."""
    pylint_tests = input_api.canned_checks.GetPylint(input_api, output_api)
    return input_api.RunTests(pylint_tests)


def _CommonChecks(input_api, output_api):
    """Checks shared by the upload and commit hooks."""
    return list(
        input_api.canned_checks.PanProjectChecks(input_api, output_api))


def CheckChangeOnUpload(input_api, output_api):
    # Upload additionally runs pylint; commit runs only the common checks.
    results = _CommonChecks(input_api, output_api)
    results.extend(_RunPylint(input_api, output_api))
    return results


def CheckChangeOnCommit(input_api, output_api):
    return _CommonChecks(input_api, output_api)
|
Add a presubmit script that runs pylint in catapult/dashboard.
|
Add a presubmit script that runs pylint in catapult/dashboard.
Also, resolve current issues in catapult/dashboard that pylint warns about. (Note: these were also resolved in cl/95586698.)
Review URL: https://codereview.chromium.org/1188483002
|
Python
|
bsd-3-clause
|
sahiljain/catapult,catapult-project/catapult,danbeam/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult,dstockwell/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,benschmaus/catapult,benschmaus/catapult,scottmcmaster/catapult,benschmaus/catapult,0x90sled/catapult,catapult-project/catapult-csm,0x90sled/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,zeptonaut/catapult,catapult-project/catapult-csm,zeptonaut/catapult,dstockwell/catapult,sahiljain/catapult,modulexcite/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,catapult-project/catapult,scottmcmaster/catapult,catapult-project/catapult,sahiljain/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,catapult-project/catapult,zeptonaut/catapult,danbeam/catapult,0x90sled/catapult,SummerLW/Perf-Insight-Report,danbeam/catapult,sahiljain/catapult,scottmcmaster/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,benschmaus/catapult,dstockwell/catapult,dstockwell/catapult,sahiljain/catapult,modulexcite/catapult,danbeam/catapult,modulexcite/catapult
|
---
+++
@@ -1,4 +1,4 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -29,6 +29,7 @@
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
+ results.extend(_RunPylint(input_api, output_api))
return results
@@ -36,3 +37,8 @@
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
+
+
+def _RunPylint(input_api, output_api):
+ tests = input_api.canned_checks.GetPylint(input_api, output_api)
+ return input_api.RunTests(tests)
|
ae7960e2e3b7c3cd4bd63e55613e7a1f58b51949
|
utils/http.py
|
utils/http.py
|
import httplib2


def url_exists(url):
    """Check that a url - when following redirection - exists.

    This is needed because django's validators rely on python's urllib2
    which in versions < 2.6 won't follow redirects.
    """
    h = httplib2.Http()
    # Must be set *before* the request is issued; the original set it
    # afterwards, so redirects were never actually followed.
    h.follow_all_redirects = True
    try:
        resp, content = h.request(url, method="HEAD")
    except httplib2.ServerNotFoundError:
        return False
    return 200 <= resp.status < 400
|
import requests


def url_exists(url):
    """Check that a url (when following redirection) exists.

    This is needed because Django's validators rely on Python's urllib2
    which in versions < 2.6 won't follow redirects.
    """
    try:
        # requests does not follow redirects for HEAD requests by
        # default, so request it explicitly to match the documented
        # behaviour above.
        response = requests.head(url, allow_redirects=True)
        return 200 <= response.status_code < 400
    except requests.ConnectionError:
        return False
|
Switch to requests for checking if links are valid.
|
Switch to requests for checking if links are valid.
This should have the side effect of not caring about invalid SSL certs, for
https://unisubs.sifterapp.com/projects/12298/issues/557501/comments
|
Python
|
agpl-3.0
|
norayr/unisubs,eloquence/unisubs,norayr/unisubs,pculture/unisubs,wevoice/wesub,eloquence/unisubs,wevoice/wesub,pculture/unisubs,eloquence/unisubs,pculture/unisubs,ofer43211/unisubs,eloquence/unisubs,ReachingOut/unisubs,ujdhesa/unisubs,norayr/unisubs,ofer43211/unisubs,ujdhesa/unisubs,wevoice/wesub,ReachingOut/unisubs,ujdhesa/unisubs,ofer43211/unisubs,ReachingOut/unisubs,wevoice/wesub,ujdhesa/unisubs,ofer43211/unisubs,norayr/unisubs,ReachingOut/unisubs,pculture/unisubs
|
---
+++
@@ -1,16 +1,13 @@
-import httplib2
+import requests
def url_exists(url):
- """Check that a url- when following redirection - exists.
+ """Check that a url (when following redirection) exists.
- This is needed because django's validators rely on python's urllib2
+ This is needed because Django's validators rely on Python's urllib2
which in verions < 2.6 won't follow redirects.
"""
- h = httplib2.Http()
try:
- resp, content = h.request(url, method="HEAD")
- except httplib2.ServerNotFoundError:
+ return 200 <= requests.head(url).status_code < 400
+ except requests.ConnectionError:
return False
- h.follow_all_redirects = True
- return 200 <= resp.status < 400
|
f529c9f5092262f9089ad51831a8545d8f8650fa
|
workers/subscriptions.py
|
workers/subscriptions.py
|
import os
import telegram
from leonard import Leonard
if __name__ == '__main__':
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
i = 0
while True:
if i % 10 == 0:
bot.collect_plugins()
for name, check, send in bot.subscriptions:
send(bot, check(bot))
i += 1
|
import os
import time

import telegram

from leonard import Leonard


if __name__ == '__main__':
    # The worker expects to run from the project root.
    os.chdir('../')
    bot = Leonard(telegram.Bot(os.environ['BOT_TOKEN']))
    bot.collect_plugins()
    # Poll every subscription once a minute, forever.
    while True:
        for subscription in bot.subscriptions:
            name, check, send = subscription
            send(bot, check(bot))
        time.sleep(60)
|
Remove collecting plugins every second
|
Remove collecting plugins every second
|
Python
|
mit
|
sevazhidkov/leonard
|
---
+++
@@ -1,4 +1,5 @@
import os
+import time
import telegram
@@ -8,10 +9,8 @@
os.chdir('../')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client)
- i = 0
+ bot.collect_plugins()
while True:
- if i % 10 == 0:
- bot.collect_plugins()
for name, check, send in bot.subscriptions:
send(bot, check(bot))
- i += 1
+ time.sleep(60)
|
038bc954ed63db6df192de50483f218b037c2438
|
searchlight/cmd/listener.py
|
searchlight/cmd/listener.py
|
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_service import service as os_service
from searchlight import listener
from searchlight import service
CONF = cfg.CONF
CONF.import_group("listener", "searchlight.listener")
def main():
service.prepare_service()
launcher = os_service.ProcessLauncher(CONF)
launcher.launch_service(
listener.ListenerService(),
workers=CONF.listener.workers)
launcher.wait()
if __name__ == "__main__":
main()
|
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_service import service as os_service
from searchlight import listener
from searchlight import service
CONF = cfg.CONF
CONF.import_group("listener", "searchlight.listener")
def main():
service.prepare_service()
launcher = os_service.ProcessLauncher(CONF, restart_method='mutate')
launcher.launch_service(
listener.ListenerService(),
workers=CONF.listener.workers)
launcher.wait()
if __name__ == "__main__":
main()
|
Enable mutable config in searchlight
|
Enable mutable config in searchlight
New releases of oslo.config support a 'mutable' parameter to Opts.
oslo.service provides an option here Icec3e664f3fe72614e373b2938e8dee53cf8bc5e
allows services to tell oslo.service they want mutate_config_files to be
called by passing a parameter.
This commit is to use the same. This allows searchlight to benefit from
I1e7a69de169cc85f4c09954b2f46ce2da7106d90, where the 'debug' option
(owned by oslo.log) is made mutable. we should be able to turn debug
logging on and off by changing the config.
tc goal:
https://governance.openstack.org/tc/goals/rocky/enable-mutable-configuration.html
Change-Id: I1c2833bef93b1382c7ddb71dba8621004dcd4cb1
|
Python
|
apache-2.0
|
openstack/searchlight,openstack/searchlight,openstack/searchlight
|
---
+++
@@ -26,7 +26,7 @@
def main():
service.prepare_service()
- launcher = os_service.ProcessLauncher(CONF)
+ launcher = os_service.ProcessLauncher(CONF, restart_method='mutate')
launcher.launch_service(
listener.ListenerService(),
workers=CONF.listener.workers)
|
aed365f2f86090c22640c05e9fb2f821a6ab12a5
|
minique_tests/conftest.py
|
minique_tests/conftest.py
|
import logging
import os
import pytest
from redis import Redis
from minique.utils import get_random_pronounceable_string
def pytest_configure() -> None:
logging.basicConfig(datefmt="%Y-%m-%d %H:%M:%S", level=logging.INFO)
@pytest.fixture()
def redis() -> Redis:
redis_url = os.environ.get("REDIS_URL")
if not redis_url: # pragma: no cover
pytest.skip("no REDIS_URL (required for redis fixture)")
return Redis.from_url(redis_url)
@pytest.fixture()
def random_queue_name() -> str:
return "test_queue_%s" % get_random_pronounceable_string()
|
import logging
import os

import pytest
from redis import Redis

from minique.utils import get_random_pronounceable_string


def pytest_configure() -> None:
    logging.basicConfig(datefmt="%Y-%m-%d %H:%M:%S", level=logging.INFO)


@pytest.fixture(scope="session")
def redis_url() -> str:
    """URL of the Redis server used by the tests, from $REDIS_URL."""
    configured = os.getenv("REDIS_URL")
    if configured:
        return configured
    pytest.skip("no REDIS_URL (required for redis fixture)")  # pragma: no cover


@pytest.fixture()
def redis(redis_url) -> Redis:
    """Redis client connected to the configured test server."""
    return Redis.from_url(redis_url)


@pytest.fixture()
def random_queue_name() -> str:
    """Fresh queue name per test so runs don't interfere."""
    return "test_queue_%s" % get_random_pronounceable_string()
|
Make test fixtures more modular
|
Make test fixtures more modular
|
Python
|
mit
|
valohai/minique
|
---
+++
@@ -11,11 +11,16 @@
logging.basicConfig(datefmt="%Y-%m-%d %H:%M:%S", level=logging.INFO)
+@pytest.fixture(scope="session")
+def redis_url() -> str:
+ url = os.environ.get("REDIS_URL")
+ if not url: # pragma: no cover
+ pytest.skip("no REDIS_URL (required for redis fixture)")
+ return url
+
+
@pytest.fixture()
-def redis() -> Redis:
- redis_url = os.environ.get("REDIS_URL")
- if not redis_url: # pragma: no cover
- pytest.skip("no REDIS_URL (required for redis fixture)")
+def redis(redis_url) -> Redis:
return Redis.from_url(redis_url)
|
b6f51e8873d1905da53027b73614f2eeb4c4ed3d
|
web/form/fields/validators.py
|
web/form/fields/validators.py
|
# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from wtforms.validators import Optional
class OptionalIf(Optional):
# makes a field optional if some other data is supplied
def __init__(self, deciding_field, *args, **kwargs):
self.deciding_field = deciding_field
super(OptionalIf, self).__init__(*args, **kwargs)
def __call__(self, form, field):
deciding_field = form._fields.get(self.deciding_field)
if deciding_field is None:
raise Exception('no field named "{}" in form'.format(
self.deciding_field))
if bool(deciding_field.data):
super(OptionalIf, self).__call__(form, field)
|
# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from wtforms.validators import Optional
class OptionalIf(Optional):
    """Make a field optional depending on the value of another field.

    By default the field becomes optional when ``deciding_field`` holds
    a truthy value; with ``invert=True`` the condition is flipped and
    the field is optional when the deciding field is empty.
    """
    # makes a field optional if some other data is supplied or is not supplied
    def __init__(self, deciding_field, invert=False, *args, **kwargs):
        # deciding_field: name of the form field whose value decides
        # whether this field is optional. Remaining args go to Optional.
        self.deciding_field = deciding_field
        self.invert = invert
        super(OptionalIf, self).__init__(*args, **kwargs)
    def __call__(self, form, field):
        # Reaches into wtforms' private _fields mapping to resolve the
        # deciding field by name; missing names are a programming error.
        deciding_field = form._fields.get(self.deciding_field)
        if deciding_field is None:
            raise Exception('no field named "{}" in form'.format(
                self.deciding_field))
        # The literal string 'None' is treated as "no value" here —
        # presumably what select fields submit for an empty choice;
        # confirm against the forms that use this validator.
        # XOR with self.invert flips the condition when invert=True.
        if (bool(deciding_field.data) and deciding_field.data != 'None')\
            ^ self.invert:
            super(OptionalIf, self).__call__(form, field)
|
Add option to invert `OptionalIf` validator
|
Add option to invert `OptionalIf` validator
|
Python
|
apache-2.0
|
agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft
|
---
+++
@@ -5,9 +5,10 @@
class OptionalIf(Optional):
- # makes a field optional if some other data is supplied
- def __init__(self, deciding_field, *args, **kwargs):
+ # makes a field optional if some other data is supplied or is not supplied
+ def __init__(self, deciding_field, invert=False, *args, **kwargs):
self.deciding_field = deciding_field
+ self.invert = invert
super(OptionalIf, self).__init__(*args, **kwargs)
def __call__(self, form, field):
@@ -15,5 +16,6 @@
if deciding_field is None:
raise Exception('no field named "{}" in form'.format(
self.deciding_field))
- if bool(deciding_field.data):
+ if (bool(deciding_field.data) and deciding_field.data != 'None')\
+ ^ self.invert:
super(OptionalIf, self).__call__(form, field)
|
1b31a86fcf5a449c67c20e4c971e6cb8b6bba126
|
providers/org/ttu/apps.py
|
providers/org/ttu/apps.py
|
from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.org.ttu'
version = '0.0.1'
title = 'ttu'
long_title = 'Texas Tech Univeristy Libraries'
home_page = 'http://ttu-ir.tdl.org/'
url = 'http://ttu-ir.tdl.org/ttu-oai/request'
time_granularity = False
|
from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
    # Harvester configuration for the Texas Tech University Libraries
    # OAI-PMH endpoint.
    name = 'providers.org.ttu'
    version = '0.0.1'
    title = 'ttu'
    long_title = 'Texas Tech Univeristy Libraries'  # NOTE(review): "Univeristy" looks like a typo, but this is runtime data — confirm before changing
    home_page = 'http://ttu-ir.tdl.org/'
    url = 'http://ttu-ir.tdl.org/ttu-oai/request'
    # presumably the endpoint only supports date (not time) granularity
    # for selective harvesting — confirm against the OAI base class.
    time_granularity = False
    # Restrict harvesting to these approved OAI set identifiers.
    approved_sets = ['col_2346_521', 'col_2346_469']
|
Add approved sets for Texas Tech
|
Add approved sets for Texas Tech
|
Python
|
apache-2.0
|
CenterForOpenScience/SHARE,aaxelb/SHARE,CenterForOpenScience/SHARE,CenterForOpenScience/SHARE,laurenbarker/SHARE,aaxelb/SHARE,laurenbarker/SHARE,zamattiac/SHARE,zamattiac/SHARE,aaxelb/SHARE,laurenbarker/SHARE,zamattiac/SHARE
|
---
+++
@@ -9,3 +9,4 @@
home_page = 'http://ttu-ir.tdl.org/'
url = 'http://ttu-ir.tdl.org/ttu-oai/request'
time_granularity = False
+ approved_sets = ['col_2346_521', 'col_2346_469']
|
cad612fff70f89307cb4601e4dfca2c3b4a0b420
|
test_suite.py
|
test_suite.py
|
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = sys.argv[1:]
if not apps:
apps = [
'resources',
'forms',
'base',
]
management.call_command('test', *apps, interactive=False)
|
import os
import sys
# Must be set before anything imports Django settings.
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
# Django >= 1.7 requires setup() to populate the app registry before
# use; older versions have no setup attribute, hence the hasattr guard.
if hasattr(django, 'setup'):
    django.setup()
from django.core import management
# Apps to test come from the command line; fall back to the full set.
apps = sys.argv[1:]
if not apps:
    apps = [
        'resources',
        'forms',
        'base',
    ]
management.call_command('test', *apps, interactive=False)
|
Call setup() before testing on Django 1.7+
|
Call setup() before testing on Django 1.7+
Signed-off-by: Don Naegely <e690a32c1e2176a2bfface09e204830e1b5491e3@gmail.com>
|
Python
|
bsd-2-clause
|
chop-dbhi/serrano,chop-dbhi/serrano
|
---
+++
@@ -2,6 +2,10 @@
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
+
+import django
+if hasattr(django, 'setup'):
+ django.setup()
from django.core import management
|
72fe5252a05f8afc8b2cc4e2f10a0572acb1a629
|
wispy/tree.py
|
wispy/tree.py
|
"""
wispy.tree
~~~~~~~~~~
Contains the AST nodes defined by *wispy*.
"""
# pylint: disable=no-init, too-few-public-methods, missing-docstring
class Node:
""" Base class for all CST nodes.
Exports a couple of attributes, which can be
find in any CST node:
* parent: the parent of this node or None, if the node
doesn't have a parent, as in the case of the root node.
* grammar: The grammar from which this node was built.
"""
parent = None
grammar = None
class ScriptBlock(Node):
_fields = ('statements', 'named_blocks')
class NamedBlock(Node):
_fields = ('block_name', 'statements')
|
"""
wispy.tree
~~~~~~~~~~
Contains the AST nodes defined by *wispy*.
"""
# pylint: disable=no-init, too-few-public-methods, missing-docstring
# pylint: disable=protected-access
from inspect import Parameter as SignatureParameter, Signature
def make_signature(names):
""" Build a Signature object from a list of names. """
return Signature(
SignatureParameter(name,
SignatureParameter.POSITIONAL_OR_KEYWORD,
default=None)
for name in names)
class NodeMeta(type):
"""
Metaclass which reads the '_fields' attribute
and builds a signature from it. This allows a class
to be defined either as ::
m = MyClass(a=1, b=2)
or as ::
m = MyClass()
m.a = 1
m.b = 2
"""
def __new__(mcs, name, bases, clsdict):
clsobj = super().__new__(mcs, name, bases, clsdict)
sig = make_signature(clsobj._fields)
setattr(clsobj, '__signature__', sig)
return clsobj
class Node(metaclass=NodeMeta):
""" Base class for all CST nodes.
Exports a couple of attributes, which can be
find in any CST node:
* parent: the parent of this node or None, if the node
doesn't have a parent, as in the case of the root node.
* grammar: The grammar from which this node was built.
"""
parent = None
grammar = None
_fields = ()
def __init__(self, *args, **kwargs):
# pylint: disable=no-member
bound = self.__signature__.bind(*args, **kwargs)
for name, val in bound.arguments.items():
setattr(self, name, val)
class ScriptBlock(Node):
_fields = ('statements', 'named_blocks')
class NamedBlock(Node):
_fields = ('block_name', 'statements')
|
Allow creating AST nodes using call syntax, through signatures.
|
Allow creating AST nodes using call syntax, through signatures.
By using Python 3's signatures, we can create AST nodes using
call syntax, as in Name(value=...). This also improves the
introspection.
|
Python
|
apache-2.0
|
RoPython/wispy
|
---
+++
@@ -5,8 +5,43 @@
Contains the AST nodes defined by *wispy*.
"""
# pylint: disable=no-init, too-few-public-methods, missing-docstring
+# pylint: disable=protected-access
-class Node:
+from inspect import Parameter as SignatureParameter, Signature
+
+
+def make_signature(names):
+ """ Build a Signature object from a list of names. """
+ return Signature(
+ SignatureParameter(name,
+ SignatureParameter.POSITIONAL_OR_KEYWORD,
+ default=None)
+ for name in names)
+
+
+class NodeMeta(type):
+ """
+ Metaclass which reads the '_fields' attribute
+ and builds a signature from it. This allows a class
+ to be defined either as ::
+
+ m = MyClass(a=1, b=2)
+
+ or as ::
+
+ m = MyClass()
+ m.a = 1
+ m.b = 2
+ """
+
+ def __new__(mcs, name, bases, clsdict):
+ clsobj = super().__new__(mcs, name, bases, clsdict)
+ sig = make_signature(clsobj._fields)
+ setattr(clsobj, '__signature__', sig)
+ return clsobj
+
+
+class Node(metaclass=NodeMeta):
""" Base class for all CST nodes.
Exports a couple of attributes, which can be
@@ -21,6 +56,13 @@
parent = None
grammar = None
+ _fields = ()
+
+ def __init__(self, *args, **kwargs):
+ # pylint: disable=no-member
+ bound = self.__signature__.bind(*args, **kwargs)
+ for name, val in bound.arguments.items():
+ setattr(self, name, val)
class ScriptBlock(Node):
|
0c47a60185122dbea6ded2118305094d6917afb1
|
tests/urls.py
|
tests/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^edtf/', include('edtf.urls', namespace='edtf'))
]
|
from django.conf.urls import include, url
urlpatterns = [
url(r'^edtf/', include('edtf.urls', namespace='edtf'))
]
|
Remove admin site from test project.
|
Remove admin site from test project.
|
Python
|
bsd-3-clause
|
unt-libraries/django-edtf,unt-libraries/django-edtf,unt-libraries/django-edtf
|
---
+++
@@ -1,7 +1,5 @@
from django.conf.urls import include, url
-from django.contrib import admin
urlpatterns = [
- url(r'^admin/', admin.site.urls),
url(r'^edtf/', include('edtf.urls', namespace='edtf'))
]
|
7a855a5cd09785a23061af1be13135ad23c4daf1
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'selectable',
),
SITE_ID=1,
SECRET_KEY='super-secret',
ROOT_URLCONF='selectable.tests.urls',
TEST_RUNNER='django.test.simple.DjangoTestSuiteRunner',
)
from django.test.utils import get_runner
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
args = sys.argv[1:] or ['selectable', ]
failures = test_runner.run_tests(args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
import os
import sys
from django.conf import settings
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS=(
'selectable',
),
SITE_ID=1,
SECRET_KEY='super-secret',
ROOT_URLCONF='selectable.tests.urls',
)
from django.test.utils import get_runner
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True, failfast=False)
args = sys.argv[1:] or ['selectable', ]
failures = test_runner.run_tests(args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
Test with the default test runner for all Django versions.
|
Test with the default test runner for all Django versions.
|
Python
|
bsd-2-clause
|
mlavin/django-selectable,affan2/django-selectable,mlavin/django-selectable,mlavin/django-selectable,affan2/django-selectable,affan2/django-selectable
|
---
+++
@@ -19,7 +19,6 @@
SITE_ID=1,
SECRET_KEY='super-secret',
ROOT_URLCONF='selectable.tests.urls',
- TEST_RUNNER='django.test.simple.DjangoTestSuiteRunner',
)
|
e4f4ad313cb9d89114a1189861405148ef6f19ae
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
# Adapted from https://raw.githubusercontent.com/hzy/django-polarize/master/runtests.py
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'flatblocks',
'tests',
),
ROOT_URLCONF='tests.urls',
MIDDLEWARE_CLASSES=(
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
),
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'debug': True
}
},
],
)
def runtests():
argv = sys.argv[:1] + ['test', 'tests']
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
# Adapted from https://raw.githubusercontent.com/hzy/django-polarize/master/runtests.py
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'flatblocks',
'tests',
),
ROOT_URLCONF='tests.urls',
MIDDLEWARE_CLASSES=(
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
),
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'debug': True,
'context_processors': [
'django.contrib.auth.context_processors.auth',
],
}
},
],
)
def runtests():
argv = sys.argv[:1] + ['test', 'tests']
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
Update test config for django 1.11
|
Update test config for django 1.11
|
Python
|
bsd-3-clause
|
funkybob/django-flatblocks,funkybob/django-flatblocks
|
---
+++
@@ -34,7 +34,10 @@
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
- 'debug': True
+ 'debug': True,
+ 'context_processors': [
+ 'django.contrib.auth.context_processors.auth',
+ ],
}
},
],
|
0cd5deefc61f56351af24f6597a1509ea4b4b567
|
settings.py
|
settings.py
|
import os
INTERVAL = int(os.environ.get('INTERVAL', 60))
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
AWS_REGION = os.environ.get('AWS_REGION', 'us-west-2')
ALERTS = os.environ['ALERTS']
ANALYTICS_KEY_NAME = os.environ['ANALYTICS_KEY_NAME']
FROM_EMAIL = os.environ['FROM_EMAIL']
LOG_FILE = 'rightnowalerts.log'
|
import os
INTERVAL = int(os.environ.get('INTERVAL', 60))
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
AWS_REGION = os.environ.get('AWS_REGION', 'us-west-2')
ALERTS = os.environ['ALERTS']
ANALYTICS_KEY_NAME = os.environ['ANALYTICS_KEY_NAME']
FROM_EMAIL = os.environ['FROM_EMAIL']
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOG_FILE = os.environ.get('LOG_FILE', BASE_DIR + '/rightnowalerts.log')
|
Read log file from ENV and add full path for default
|
Read log file from ENV and add full path for default
|
Python
|
mit
|
lorden/right-now-alerts
|
---
+++
@@ -7,4 +7,5 @@
ALERTS = os.environ['ALERTS']
ANALYTICS_KEY_NAME = os.environ['ANALYTICS_KEY_NAME']
FROM_EMAIL = os.environ['FROM_EMAIL']
-LOG_FILE = 'rightnowalerts.log'
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+LOG_FILE = os.environ.get('LOG_FILE', BASE_DIR + '/rightnowalerts.log')
|
6c57f18e9f40f5dd54fc35ef2f34924ccc50ee76
|
app/views.py
|
app/views.py
|
from app import app
from flask import render_template, request
from .looper import getNextLink
@app.route('/')
def index_page():
return render_template('index.html')
@app.route('/loop')
def loop_request():
link = request.args.get('link', '', type=str)
return getNextLink(link.strip())
|
from app import app
from flask import render_template, request
from .looper import getNextLink
@app.route('/')
def index_page():
return render_template('index.html')
@app.route('/loop')
def loop_request():
if "unicode" in __builtins__:
str_type = unicode
else:
str_type = str
link = request.args.get('link', '', type=str_type)
return getNextLink(link.strip())
|
Fix link being empty string in py2 if it's unicode
|
Fix link being empty string in py2 if it's unicode
Should fix any remaining Unicode errors.
|
Python
|
mit
|
kartikanand/wikilooper,kartikanand/wikilooper,kartikanand/wikilooper
|
---
+++
@@ -8,5 +8,9 @@
@app.route('/loop')
def loop_request():
- link = request.args.get('link', '', type=str)
+ if "unicode" in __builtins__:
+ str_type = unicode
+ else:
+ str_type = str
+ link = request.args.get('link', '', type=str_type)
return getNextLink(link.strip())
|
044b08b4097f5c1739ed8d212d1815edfc1468d1
|
humbug/wsgi.py
|
humbug/wsgi.py
|
"""
WSGI config for humbug project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "humbug.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
"""
WSGI config for humbug project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "humbug.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
Remove more commented out example code
|
Remove more commented out example code
(imported from commit 83f7c8763d96af5341fe630d1d8be11eef1f33aa)
|
Python
|
apache-2.0
|
bowlofstew/zulip,EasonYi/zulip,so0k/zulip,jeffcao/zulip,babbage/zulip,souravbadami/zulip,krtkmj/zulip,Diptanshu8/zulip,sup95/zulip,easyfmxu/zulip,brockwhittaker/zulip,deer-hope/zulip,firstblade/zulip,esander91/zulip,johnny9/zulip,easyfmxu/zulip,peiwei/zulip,vaidap/zulip,lfranchi/zulip,Galexrt/zulip,guiquanz/zulip,dawran6/zulip,peiwei/zulip,Frouk/zulip,gigawhitlocks/zulip,susansls/zulip,arpith/zulip,ryansnowboarder/zulip,peiwei/zulip,thomasboyt/zulip,tommyip/zulip,bluesea/zulip,verma-varsha/zulip,firstblade/zulip,JPJPJPOPOP/zulip,eeshangarg/zulip,Gabriel0402/zulip,suxinde2009/zulip,PhilSk/zulip,sup95/zulip,verma-varsha/zulip,amyliu345/zulip,sonali0901/zulip,mohsenSy/zulip,eeshangarg/zulip,ApsOps/zulip,aps-sids/zulip,arpith/zulip,noroot/zulip,gigawhitlocks/zulip,mansilladev/zulip,qq1012803704/zulip,xuxiao/zulip,jackrzhang/zulip,vaidap/zulip,dotcool/zulip,dwrpayne/zulip,johnnygaddarr/zulip,jerryge/zulip,MayB/zulip,johnny9/zulip,dnmfarrell/zulip,jackrzhang/zulip,bowlofstew/zulip,kokoar/zulip,zulip/zulip,jeffcao/zulip,kou/zulip,sup95/zulip,suxinde2009/zulip,isht3/zulip,kaiyuanheshang/zulip,xuxiao/zulip,hj3938/zulip,PaulPetring/zulip,mdavid/zulip,aakash-cr7/zulip,zacps/zulip,PaulPetring/zulip,bowlofstew/zulip,TigorC/zulip,seapasulli/zulip,wweiradio/zulip,LeeRisk/zulip,rishig/zulip,susansls/zulip,Vallher/zulip,cosmicAsymmetry/zulip,wavelets/zulip,saitodisse/zulip,m1ssou/zulip,mdavid/zulip,brainwane/zulip,shubhamdhama/zulip,seapasulli/zulip,johnny9/zulip,amyliu345/zulip,kokoar/zulip,ryansnowboarder/zulip,RobotCaleb/zulip,ryanbackman/zulip,mansilladev/zulip,umkay/zulip,deer-hope/zulip,voidException/zulip,jeffcao/zulip,udxxabp/zulip,karamcnair/zulip,andersk/zulip,luyifan/zulip,ahmadassaf/zulip,seapasulli/zulip,itnihao/zulip,LeeRisk/zulip,mansilladev/zulip,Suninus/zulip,kou/zulip,babbage/zulip,he15his/zulip,j831/zulip,ryanbackman/zulip,tommyip/zulip,natanovia/zulip,wweiradio/zulip,Juanvulcano/zulip,jessedhillon/zulip,zorojean/zulip,lfranchi/zulip,Frouk/zulip,zhaoweigg/zulip,Sm
artPeople/zulip,xuanhan863/zulip,reyha/zulip,akuseru/zulip,atomic-labs/zulip,zwily/zulip,EasonYi/zulip,fw1121/zulip,shrikrishnaholla/zulip,yuvipanda/zulip,m1ssou/zulip,tdr130/zulip,zachallaun/zulip,hustlzp/zulip,KJin99/zulip,zachallaun/zulip,gigawhitlocks/zulip,sharmaeklavya2/zulip,ashwinirudrappa/zulip,vaidap/zulip,moria/zulip,he15his/zulip,bitemyapp/zulip,hengqujushi/zulip,ufosky-server/zulip,johnnygaddarr/zulip,suxinde2009/zulip,wweiradio/zulip,aps-sids/zulip,amallia/zulip,AZtheAsian/zulip,hackerkid/zulip,hj3938/zulip,kokoar/zulip,jonesgithub/zulip,stamhe/zulip,karamcnair/zulip,ashwinirudrappa/zulip,christi3k/zulip,amyliu345/zulip,wavelets/zulip,luyifan/zulip,schatt/zulip,wangdeshui/zulip,jrowan/zulip,Juanvulcano/zulip,adnanh/zulip,jerryge/zulip,jainayush975/zulip,synicalsyntax/zulip,praveenaki/zulip,hustlzp/zulip,showell/zulip,levixie/zulip,ericzhou2008/zulip,KJin99/zulip,dotcool/zulip,Cheppers/zulip,krtkmj/zulip,zwily/zulip,dhcrzf/zulip,saitodisse/zulip,he15his/zulip,dnmfarrell/zulip,seapasulli/zulip,tiansiyuan/zulip,atomic-labs/zulip,Drooids/zulip,aps-sids/zulip,MariaFaBella85/zulip,sup95/zulip,SmartPeople/zulip,so0k/zulip,Vallher/zulip,christi3k/zulip,aliceriot/zulip,mansilladev/zulip,he15his/zulip,codeKonami/zulip,easyfmxu/zulip,andersk/zulip,so0k/zulip,Jianchun1/zulip,vikas-parashar/zulip,Qgap/zulip,babbage/zulip,andersk/zulip,huangkebo/zulip,sonali0901/zulip,swinghu/zulip,aliceriot/zulip,ryanbackman/zulip,mdavid/zulip,synicalsyntax/zulip,Drooids/zulip,MayB/zulip,reyha/zulip,jainayush975/zulip,he15his/zulip,andersk/zulip,udxxabp/zulip,amyliu345/zulip,showell/zulip,littledogboy/zulip,glovebx/zulip,synicalsyntax/zulip,firstblade/zulip,bastianh/zulip,dawran6/zulip,Suninus/zulip,amanharitsh123/zulip,lfranchi/zulip,tommyip/zulip,niftynei/zulip,ericzhou2008/zulip,codeKonami/zulip,Jianchun1/zulip,ashwinirudrappa/zulip,jackrzhang/zulip,samatdav/zulip,tbutter/zulip,niftynei/zulip,jphilipsen05/zulip,stamhe/zulip,cosmicAsymmetry/zulip,AZtheAsian/zulip,xuanhan863/zulip
,jphilipsen05/zulip,christi3k/zulip,dattatreya303/zulip,samatdav/zulip,RobotCaleb/zulip,joyhchen/zulip,m1ssou/zulip,zulip/zulip,jainayush975/zulip,saitodisse/zulip,levixie/zulip,zulip/zulip,Drooids/zulip,KingxBanana/zulip,ryanbackman/zulip,Suninus/zulip,zhaoweigg/zulip,jerryge/zulip,avastu/zulip,KingxBanana/zulip,samatdav/zulip,atomic-labs/zulip,brainwane/zulip,PaulPetring/zulip,RobotCaleb/zulip,jimmy54/zulip,swinghu/zulip,bastianh/zulip,itnihao/zulip,luyifan/zulip,PhilSk/zulip,fw1121/zulip,PaulPetring/zulip,moria/zulip,pradiptad/zulip,tiansiyuan/zulip,DazWorrall/zulip,pradiptad/zulip,reyha/zulip,stamhe/zulip,amanharitsh123/zulip,levixie/zulip,JanzTam/zulip,eeshangarg/zulip,dwrpayne/zulip,LAndreas/zulip,udxxabp/zulip,JanzTam/zulip,dotcool/zulip,wavelets/zulip,timabbott/zulip,zorojean/zulip,voidException/zulip,alliejones/zulip,zwily/zulip,voidException/zulip,Drooids/zulip,KingxBanana/zulip,DazWorrall/zulip,Batterfii/zulip,jimmy54/zulip,punchagan/zulip,shrikrishnaholla/zulip,nicholasbs/zulip,umkay/zulip,mdavid/zulip,Suninus/zulip,blaze225/zulip,jainayush975/zulip,bluesea/zulip,technicalpickles/zulip,ApsOps/zulip,vaidap/zulip,ashwinirudrappa/zulip,jrowan/zulip,AZtheAsian/zulip,EasonYi/zulip,willingc/zulip,krtkmj/zulip,developerfm/zulip,praveenaki/zulip,blaze225/zulip,umkay/zulip,JanzTam/zulip,ikasumiwt/zulip,vakila/zulip,pradiptad/zulip,JPJPJPOPOP/zulip,peguin40/zulip,luyifan/zulip,zhaoweigg/zulip,aakash-cr7/zulip,AZtheAsian/zulip,eeshangarg/zulip,amanharitsh123/zulip,Frouk/zulip,umkay/zulip,so0k/zulip,developerfm/zulip,calvinleenyc/zulip,jackrzhang/zulip,souravbadami/zulip,swinghu/zulip,dwrpayne/zulip,deer-hope/zulip,esander91/zulip,thomasboyt/zulip,mahim97/zulip,andersk/zulip,samatdav/zulip,m1ssou/zulip,voidException/zulip,PhilSk/zulip,Gabriel0402/zulip,hafeez3000/zulip,punchagan/zulip,hackerkid/zulip,proliming/zulip,LAndreas/zulip,mohsenSy/zulip,Diptanshu8/zulip,Frouk/zulip,jerryge/zulip,Juanvulcano/zulip,dwrpayne/zulip,DazWorrall/zulip,wdaher/zulip,shaunstanislaus/
zulip,bssrdf/zulip,amanharitsh123/zulip,dxq-git/zulip,jackrzhang/zulip,wangdeshui/zulip,MayB/zulip,esander91/zulip,tommyip/zulip,levixie/zulip,shaunstanislaus/zulip,krtkmj/zulip,Batterfii/zulip,souravbadami/zulip,seapasulli/zulip,tbutter/zulip,ipernet/zulip,littledogboy/zulip,RobotCaleb/zulip,adnanh/zulip,easyfmxu/zulip,christi3k/zulip,Qgap/zulip,wweiradio/zulip,rishig/zulip,deer-hope/zulip,zacps/zulip,MariaFaBella85/zulip,ryansnowboarder/zulip,paxapy/zulip,jphilipsen05/zulip,Vallher/zulip,technicalpickles/zulip,suxinde2009/zulip,kaiyuanheshang/zulip,tbutter/zulip,LAndreas/zulip,jonesgithub/zulip,dotcool/zulip,jeffcao/zulip,MariaFaBella85/zulip,jackrzhang/zulip,timabbott/zulip,saitodisse/zulip,christi3k/zulip,Gabriel0402/zulip,schatt/zulip,dnmfarrell/zulip,lfranchi/zulip,jrowan/zulip,natanovia/zulip,kokoar/zulip,bowlofstew/zulip,synicalsyntax/zulip,hengqujushi/zulip,wdaher/zulip,DazWorrall/zulip,joyhchen/zulip,tbutter/zulip,rht/zulip,udxxabp/zulip,niftynei/zulip,fw1121/zulip,zacps/zulip,gkotian/zulip,Batterfii/zulip,eastlhu/zulip,Vallher/zulip,developerfm/zulip,yocome/zulip,hayderimran7/zulip,xuxiao/zulip,vaidap/zulip,bastianh/zulip,rishig/zulip,Vallher/zulip,fw1121/zulip,fw1121/zulip,JPJPJPOPOP/zulip,dnmfarrell/zulip,armooo/zulip,vabs22/zulip,bitemyapp/zulip,peguin40/zulip,amallia/zulip,rht/zulip,swinghu/zulip,natanovia/zulip,jessedhillon/zulip,glovebx/zulip,aakash-cr7/zulip,j831/zulip,TigorC/zulip,natanovia/zulip,ufosky-server/zulip,Diptanshu8/zulip,littledogboy/zulip,dotcool/zulip,JanzTam/zulip,zorojean/zulip,hustlzp/zulip,brockwhittaker/zulip,shaunstanislaus/zulip,avastu/zulip,tbutter/zulip,suxinde2009/zulip,hustlzp/zulip,glovebx/zulip,isht3/zulip,TigorC/zulip,ryansnowboarder/zulip,ikasumiwt/zulip,joshisa/zulip,ipernet/zulip,gigawhitlocks/zulip,j831/zulip,akuseru/zulip,saitodisse/zulip,rishig/zulip,sonali0901/zulip,peiwei/zulip,joyhchen/zulip,armooo/zulip,pradiptad/zulip,JPJPJPOPOP/zulip,qq1012803704/zulip,calvinleenyc/zulip,lfranchi/zulip,jphilipsen05/zulip,arp
itpanwar/zulip,littledogboy/zulip,Qgap/zulip,aps-sids/zulip,johnnygaddarr/zulip,yuvipanda/zulip,samatdav/zulip,atomic-labs/zulip,itnihao/zulip,RobotCaleb/zulip,moria/zulip,armooo/zulip,johnny9/zulip,adnanh/zulip,hengqujushi/zulip,willingc/zulip,bowlofstew/zulip,DazWorrall/zulip,dhcrzf/zulip,PaulPetring/zulip,EasonYi/zulip,dnmfarrell/zulip,so0k/zulip,rishig/zulip,zofuthan/zulip,noroot/zulip,kaiyuanheshang/zulip,ahmadassaf/zulip,proliming/zulip,zofuthan/zulip,dxq-git/zulip,umkay/zulip,ufosky-server/zulip,susansls/zulip,andersk/zulip,ApsOps/zulip,levixie/zulip,adnanh/zulip,vakila/zulip,vakila/zulip,blaze225/zulip,Galexrt/zulip,brockwhittaker/zulip,Diptanshu8/zulip,firstblade/zulip,vabs22/zulip,ApsOps/zulip,yuvipanda/zulip,jonesgithub/zulip,swinghu/zulip,sharmaeklavya2/zulip,seapasulli/zulip,bluesea/zulip,johnnygaddarr/zulip,vaidap/zulip,Drooids/zulip,dotcool/zulip,schatt/zulip,susansls/zulip,blaze225/zulip,timabbott/zulip,amallia/zulip,wdaher/zulip,proliming/zulip,vakila/zulip,tiansiyuan/zulip,natanovia/zulip,LAndreas/zulip,hackerkid/zulip,firstblade/zulip,vakila/zulip,ikasumiwt/zulip,hafeez3000/zulip,vakila/zulip,stamhe/zulip,deer-hope/zulip,pradiptad/zulip,joshisa/zulip,aliceriot/zulip,tommyip/zulip,themass/zulip,Frouk/zulip,jainayush975/zulip,hustlzp/zulip,rht/zulip,yocome/zulip,vabs22/zulip,ikasumiwt/zulip,m1ssou/zulip,verma-varsha/zulip,alliejones/zulip,Galexrt/zulip,joshisa/zulip,joshisa/zulip,wdaher/zulip,wavelets/zulip,johnnygaddarr/zulip,andersk/zulip,shubhamdhama/zulip,yocome/zulip,thomasboyt/zulip,umkay/zulip,j831/zulip,bluesea/zulip,wangdeshui/zulip,proliming/zulip,noroot/zulip,littledogboy/zulip,jonesgithub/zulip,m1ssou/zulip,arpitpanwar/zulip,shaunstanislaus/zulip,dawran6/zulip,dotcool/zulip,bastianh/zulip,Galexrt/zulip,sonali0901/zulip,arpitpanwar/zulip,glovebx/zulip,dhcrzf/zulip,timabbott/zulip,stamhe/zulip,sharmaeklavya2/zulip,grave-w-grave/zulip,shubhamdhama/zulip,bitemyapp/zulip,Jianchun1/zulip,zhaoweigg/zulip,codeKonami/zulip,zwily/zulip,jerryge/zul
ip,developerfm/zulip,huangkebo/zulip,Vallher/zulip,aliceriot/zulip,willingc/zulip,j831/zulip,eeshangarg/zulip,ryanbackman/zulip,ApsOps/zulip,vabs22/zulip,avastu/zulip,KJin99/zulip,so0k/zulip,jimmy54/zulip,shubhamdhama/zulip,luyifan/zulip,PhilSk/zulip,alliejones/zulip,punchagan/zulip,tommyip/zulip,Vallher/zulip,zachallaun/zulip,zofuthan/zulip,zulip/zulip,dattatreya303/zulip,kou/zulip,tdr130/zulip,JanzTam/zulip,thomasboyt/zulip,themass/zulip,Batterfii/zulip,zorojean/zulip,qq1012803704/zulip,umkay/zulip,jeffcao/zulip,showell/zulip,schatt/zulip,gkotian/zulip,so0k/zulip,zulip/zulip,mdavid/zulip,joyhchen/zulip,arpith/zulip,brockwhittaker/zulip,hafeez3000/zulip,mahim97/zulip,akuseru/zulip,stamhe/zulip,aps-sids/zulip,ahmadassaf/zulip,luyifan/zulip,shrikrishnaholla/zulip,Galexrt/zulip,tiansiyuan/zulip,thomasboyt/zulip,Drooids/zulip,KJin99/zulip,suxinde2009/zulip,jessedhillon/zulip,tdr130/zulip,themass/zulip,Galexrt/zulip,mohsenSy/zulip,dattatreya303/zulip,bitemyapp/zulip,bastianh/zulip,gkotian/zulip,arpith/zulip,jonesgithub/zulip,saitodisse/zulip,huangkebo/zulip,Batterfii/zulip,dwrpayne/zulip,dattatreya303/zulip,armooo/zulip,christi3k/zulip,codeKonami/zulip,hackerkid/zulip,hafeez3000/zulip,j831/zulip,swinghu/zulip,voidException/zulip,ashwinirudrappa/zulip,calvinleenyc/zulip,dnmfarrell/zulip,yuvipanda/zulip,kaiyuanheshang/zulip,TigorC/zulip,huangkebo/zulip,cosmicAsymmetry/zulip,isht3/zulip,lfranchi/zulip,dwrpayne/zulip,hj3938/zulip,KJin99/zulip,xuxiao/zulip,akuseru/zulip,qq1012803704/zulip,wweiradio/zulip,jimmy54/zulip,ahmadassaf/zulip,dxq-git/zulip,karamcnair/zulip,peiwei/zulip,arpitpanwar/zulip,willingc/zulip,Qgap/zulip,brainwane/zulip,wangdeshui/zulip,jonesgithub/zulip,yuvipanda/zulip,niftynei/zulip,babbage/zulip,hackerkid/zulip,kokoar/zulip,zhaoweigg/zulip,kou/zulip,paxapy/zulip,qq1012803704/zulip,tbutter/zulip,noroot/zulip,praveenaki/zulip,aliceriot/zulip,ashwinirudrappa/zulip,ufosky-server/zulip,sharmaeklavya2/zulip,alliejones/zulip,hustlzp/zulip,RobotCaleb/zulip,shrikr
ishnaholla/zulip,xuanhan863/zulip,paxapy/zulip,itnihao/zulip,amallia/zulip,LeeRisk/zulip,fw1121/zulip,cosmicAsymmetry/zulip,m1ssou/zulip,armooo/zulip,bluesea/zulip,aakash-cr7/zulip,calvinleenyc/zulip,noroot/zulip,codeKonami/zulip,Drooids/zulip,schatt/zulip,Jianchun1/zulip,shubhamdhama/zulip,joshisa/zulip,wdaher/zulip,dxq-git/zulip,ryansnowboarder/zulip,ericzhou2008/zulip,bitemyapp/zulip,vikas-parashar/zulip,ApsOps/zulip,kaiyuanheshang/zulip,voidException/zulip,natanovia/zulip,joyhchen/zulip,MariaFaBella85/zulip,krtkmj/zulip,Suninus/zulip,praveenaki/zulip,tdr130/zulip,DazWorrall/zulip,amallia/zulip,wdaher/zulip,shubhamdhama/zulip,timabbott/zulip,Cheppers/zulip,avastu/zulip,eeshangarg/zulip,ahmadassaf/zulip,qq1012803704/zulip,adnanh/zulip,akuseru/zulip,zhaoweigg/zulip,eastlhu/zulip,huangkebo/zulip,Qgap/zulip,developerfm/zulip,yuvipanda/zulip,mdavid/zulip,reyha/zulip,ryanbackman/zulip,blaze225/zulip,kaiyuanheshang/zulip,kou/zulip,peguin40/zulip,itnihao/zulip,wangdeshui/zulip,nicholasbs/zulip,atomic-labs/zulip,ipernet/zulip,dawran6/zulip,moria/zulip,KingxBanana/zulip,Jianchun1/zulip,bastianh/zulip,eastlhu/zulip,JanzTam/zulip,zacps/zulip,udxxabp/zulip,esander91/zulip,johnnygaddarr/zulip,seapasulli/zulip,stamhe/zulip,tbutter/zulip,SmartPeople/zulip,yocome/zulip,Batterfii/zulip,KingxBanana/zulip,shaunstanislaus/zulip,nicholasbs/zulip,joshisa/zulip,yocome/zulip,Cheppers/zulip,Gabriel0402/zulip,easyfmxu/zulip,brainwane/zulip,SmartPeople/zulip,jessedhillon/zulip,vikas-parashar/zulip,grave-w-grave/zulip,themass/zulip,susansls/zulip,tdr130/zulip,ipernet/zulip,zwily/zulip,hj3938/zulip,brainwane/zulip,ericzhou2008/zulip,isht3/zulip,MayB/zulip,qq1012803704/zulip,Cheppers/zulip,xuxiao/zulip,joshisa/zulip,hj3938/zulip,PaulPetring/zulip,Jianchun1/zulip,kaiyuanheshang/zulip,bitemyapp/zulip,akuseru/zulip,kou/zulip,LeeRisk/zulip,guiquanz/zulip,yuvipanda/zulip,vikas-parashar/zulip,amanharitsh123/zulip,esander91/zulip,paxapy/zulip,samatdav/zulip,mohsenSy/zulip,ahmadassaf/zulip,karamcnair/
zulip,niftynei/zulip,dwrpayne/zulip,voidException/zulip,atomic-labs/zulip,mohsenSy/zulip,hengqujushi/zulip,jeffcao/zulip,grave-w-grave/zulip,Juanvulcano/zulip,peguin40/zulip,EasonYi/zulip,arpitpanwar/zulip,codeKonami/zulip,udxxabp/zulip,jessedhillon/zulip,dxq-git/zulip,Juanvulcano/zulip,TigorC/zulip,easyfmxu/zulip,SmartPeople/zulip,showell/zulip,EasonYi/zulip,aakash-cr7/zulip,technicalpickles/zulip,ipernet/zulip,grave-w-grave/zulip,zofuthan/zulip,peiwei/zulip,nicholasbs/zulip,brockwhittaker/zulip,ipernet/zulip,dhcrzf/zulip,jessedhillon/zulip,souravbadami/zulip,RobotCaleb/zulip,JPJPJPOPOP/zulip,esander91/zulip,jainayush975/zulip,tommyip/zulip,technicalpickles/zulip,ApsOps/zulip,MayB/zulip,sonali0901/zulip,mdavid/zulip,LeeRisk/zulip,bssrdf/zulip,krtkmj/zulip,zachallaun/zulip,shrikrishnaholla/zulip,eastlhu/zulip,Diptanshu8/zulip,jimmy54/zulip,praveenaki/zulip,rht/zulip,littledogboy/zulip,glovebx/zulip,showell/zulip,peguin40/zulip,rht/zulip,LAndreas/zulip,jphilipsen05/zulip,calvinleenyc/zulip,souravbadami/zulip,amyliu345/zulip,shrikrishnaholla/zulip,dattatreya303/zulip,amallia/zulip,gigawhitlocks/zulip,zorojean/zulip,wavelets/zulip,developerfm/zulip,aliceriot/zulip,bssrdf/zulip,noroot/zulip,akuseru/zulip,deer-hope/zulip,itnihao/zulip,nicholasbs/zulip,Qgap/zulip,DazWorrall/zulip,codeKonami/zulip,zulip/zulip,arpith/zulip,mahim97/zulip,EasonYi/zulip,ericzhou2008/zulip,jimmy54/zulip,tiansiyuan/zulip,zachallaun/zulip,zulip/zulip,grave-w-grave/zulip,saitodisse/zulip,ryansnowboarder/zulip,ufosky-server/zulip,praveenaki/zulip,eastlhu/zulip,hackerkid/zulip,jackrzhang/zulip,arpith/zulip,isht3/zulip,Batterfii/zulip,zofuthan/zulip,Frouk/zulip,hayderimran7/zulip,jonesgithub/zulip,armooo/zulip,peiwei/zulip,hayderimran7/zulip,vikas-parashar/zulip,adnanh/zulip,natanovia/zulip,Cheppers/zulip,bssrdf/zulip,xuanhan863/zulip,levixie/zulip,Juanvulcano/zulip,bluesea/zulip,alliejones/zulip,verma-varsha/zulip,thomasboyt/zulip,ufosky-server/zulip,firstblade/zulip,LeeRisk/zulip,synicalsyntax/zuli
p,shrikrishnaholla/zulip,mahim97/zulip,esander91/zulip,kokoar/zulip,glovebx/zulip,dawran6/zulip,ikasumiwt/zulip,willingc/zulip,huangkebo/zulip,itnihao/zulip,schatt/zulip,aps-sids/zulip,zwily/zulip,themass/zulip,shaunstanislaus/zulip,guiquanz/zulip,aps-sids/zulip,hengqujushi/zulip,zofuthan/zulip,hj3938/zulip,wangdeshui/zulip,punchagan/zulip,rht/zulip,zofuthan/zulip,Cheppers/zulip,rht/zulip,Suninus/zulip,gkotian/zulip,suxinde2009/zulip,brainwane/zulip,shubhamdhama/zulip,wavelets/zulip,jeffcao/zulip,johnny9/zulip,paxapy/zulip,AZtheAsian/zulip,avastu/zulip,fw1121/zulip,bssrdf/zulip,MayB/zulip,mansilladev/zulip,souravbadami/zulip,niftynei/zulip,zorojean/zulip,avastu/zulip,sharmaeklavya2/zulip,ryansnowboarder/zulip,technicalpickles/zulip,rishig/zulip,eastlhu/zulip,lfranchi/zulip,mahim97/zulip,amanharitsh123/zulip,xuanhan863/zulip,pradiptad/zulip,ikasumiwt/zulip,joyhchen/zulip,bitemyapp/zulip,ipernet/zulip,nicholasbs/zulip,aliceriot/zulip,gkotian/zulip,xuxiao/zulip,zachallaun/zulip,verma-varsha/zulip,gkotian/zulip,xuanhan863/zulip,KJin99/zulip,wdaher/zulip,sup95/zulip,calvinleenyc/zulip,zachallaun/zulip,developerfm/zulip,schatt/zulip,cosmicAsymmetry/zulip,vabs22/zulip,dnmfarrell/zulip,TigorC/zulip,dawran6/zulip,he15his/zulip,gkotian/zulip,wweiradio/zulip,glovebx/zulip,jessedhillon/zulip,wavelets/zulip,shaunstanislaus/zulip,bssrdf/zulip,punchagan/zulip,ahmadassaf/zulip,johnny9/zulip,dxq-git/zulip,aakash-cr7/zulip,ericzhou2008/zulip,proliming/zulip,arpitpanwar/zulip,sup95/zulip,amallia/zulip,bastianh/zulip,kokoar/zulip,jerryge/zulip,hafeez3000/zulip,thomasboyt/zulip,babbage/zulip,alliejones/zulip,brockwhittaker/zulip,peguin40/zulip,levixie/zulip,Gabriel0402/zulip,ashwinirudrappa/zulip,ericzhou2008/zulip,jphilipsen05/zulip,gigawhitlocks/zulip,karamcnair/zulip,synicalsyntax/zulip,hj3938/zulip,gigawhitlocks/zulip,LAndreas/zulip,Suninus/zulip,luyifan/zulip,timabbott/zulip,themass/zulip,Gabriel0402/zulip,alliejones/zulip,ufosky-server/zulip,hafeez3000/zulip,willingc/zulip,Qgap/zu
lip,mansilladev/zulip,dattatreya303/zulip,huangkebo/zulip,susansls/zulip,bluesea/zulip,dhcrzf/zulip,KingxBanana/zulip,KJin99/zulip,showell/zulip,JPJPJPOPOP/zulip,jrowan/zulip,Gabriel0402/zulip,dxq-git/zulip,tiansiyuan/zulip,bowlofstew/zulip,wangdeshui/zulip,pradiptad/zulip,timabbott/zulip,MariaFaBella85/zulip,technicalpickles/zulip,proliming/zulip,hengqujushi/zulip,atomic-labs/zulip,adnanh/zulip,tdr130/zulip,cosmicAsymmetry/zulip,jrowan/zulip,vakila/zulip,blaze225/zulip,zorojean/zulip,moria/zulip,krtkmj/zulip,noroot/zulip,moria/zulip,eastlhu/zulip,technicalpickles/zulip,yocome/zulip,avastu/zulip,bssrdf/zulip,praveenaki/zulip,hayderimran7/zulip,guiquanz/zulip,mohsenSy/zulip,punchagan/zulip,johnny9/zulip,brainwane/zulip,reyha/zulip,eeshangarg/zulip,yocome/zulip,synicalsyntax/zulip,paxapy/zulip,zhaoweigg/zulip,guiquanz/zulip,jerryge/zulip,reyha/zulip,hafeez3000/zulip,SmartPeople/zulip,hayderimran7/zulip,arpitpanwar/zulip,rishig/zulip,vabs22/zulip,hackerkid/zulip,MayB/zulip,babbage/zulip,mansilladev/zulip,verma-varsha/zulip,themass/zulip,babbage/zulip,hayderimran7/zulip,Cheppers/zulip,easyfmxu/zulip,johnnygaddarr/zulip,LAndreas/zulip,zacps/zulip,PhilSk/zulip,isht3/zulip,hayderimran7/zulip,wweiradio/zulip,littledogboy/zulip,proliming/zulip,Diptanshu8/zulip,PhilSk/zulip,Frouk/zulip,bowlofstew/zulip,xuxiao/zulip,zacps/zulip,LeeRisk/zulip,armooo/zulip,karamcnair/zulip,mahim97/zulip,dhcrzf/zulip,ikasumiwt/zulip,MariaFaBella85/zulip,guiquanz/zulip,guiquanz/zulip,hengqujushi/zulip,amyliu345/zulip,kou/zulip,grave-w-grave/zulip,he15his/zulip,udxxabp/zulip,hustlzp/zulip,MariaFaBella85/zulip,karamcnair/zulip,sonali0901/zulip,JanzTam/zulip,jrowan/zulip,vikas-parashar/zulip,willingc/zulip,sharmaeklavya2/zulip,firstblade/zulip,jimmy54/zulip,Galexrt/zulip,AZtheAsian/zulip,deer-hope/zulip,nicholasbs/zulip,moria/zulip,swinghu/zulip,PaulPetring/zulip,xuanhan863/zulip,dhcrzf/zulip,punchagan/zulip,tiansiyuan/zulip,zwily/zulip,showell/zulip,tdr130/zulip
|
---
+++
@@ -22,7 +22,3 @@
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
-
-# Apply WSGI middleware here.
-# from helloworld.wsgi import HelloWorldApplication
-# application = HelloWorldApplication(application)
|
f96902bc16a4e3da7b85dd3d55371a6927dc0472
|
src/office/views.py
|
src/office/views.py
|
from django.http import JsonResponse
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from graphene_django.views import GraphQLView
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
def check_jwt_decorator(func):
"""
Check JWT Token by using DRF Authentication class.
Returns UNAUTHORIZED response if headers don't contain alive token.
:param func:
:return:
"""
def wrap(request, *args, **kwargs):
try:
auth_tuple = JSONWebTokenAuthentication().authenticate(request)
except APIException as e:
return JsonResponse({'details': str(e)}, status=e.status_code)
except Exception as e:
raise e
if auth_tuple is None:
return JsonResponse({'details': _('Unauthorized user')},
status=status.HTTP_401_UNAUTHORIZED)
request.user, request.auth = auth_tuple
return func(request, *args, **kwargs)
return wrap
class DRFAuthenticatedGraphQLView(GraphQLView):
"""
Extended default GraphQLView.
"""
@method_decorator(check_jwt_decorator)
def dispatch(self, request, *args, **kwargs):
return super(DRFAuthenticatedGraphQLView, self).dispatch(
request, *args, **kwargs)
|
from django.conf import settings
from django.http import JsonResponse
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from graphene_django.views import GraphQLView
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
def check_jwt_decorator(func):
"""
Check JWT Token by using DRF Authentication class.
Returns UNAUTHORIZED response if headers don't contain alive token.
:param func:
:return:
"""
def wrap(request, *args, **kwargs):
if settings.DEBUG:
if request.user.is_authenticated():
return func(request, *args, **kwargs)
try:
auth_tuple = JSONWebTokenAuthentication().authenticate(request)
except APIException as e:
return JsonResponse({'details': str(e)}, status=e.status_code)
except Exception as e:
raise e
if auth_tuple is None:
return JsonResponse({'details': _('Unauthorized user')},
status=status.HTTP_401_UNAUTHORIZED)
request.user, request.auth = auth_tuple
return func(request, *args, **kwargs)
return wrap
class DRFAuthenticatedGraphQLView(GraphQLView):
"""
Extended default GraphQLView.
"""
@method_decorator(check_jwt_decorator)
def dispatch(self, request, *args, **kwargs):
return super(DRFAuthenticatedGraphQLView, self).dispatch(
request, *args, **kwargs)
|
Fix for graphene ui on debug mode
|
Fix for graphene ui on debug mode
|
Python
|
mit
|
wis-software/office-manager
|
---
+++
@@ -1,3 +1,4 @@
+from django.conf import settings
from django.http import JsonResponse
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
@@ -17,6 +18,9 @@
"""
def wrap(request, *args, **kwargs):
+ if settings.DEBUG:
+ if request.user.is_authenticated():
+ return func(request, *args, **kwargs)
try:
auth_tuple = JSONWebTokenAuthentication().authenticate(request)
except APIException as e:
|
10379c2210b39d507af61530c56c1dbfa8cf5307
|
pbxplore/demo/__init__.py
|
pbxplore/demo/__init__.py
|
"""
Demonstration files --- :mod:`pbxplore.demo`
============================================
PBxplore bundles a set of demonstration files. This module ease the access to
these files.
The path to the demonstration files is stored in :const:`DEMO_DATA_PATH`. This
constant can be accessed as :const:`pbxplore.demo.DEMO_DATA_PATH`, or as
:const:`pbxplore.DEMO_DATA_PATH`.
A list of the available demonstration files is available with the
:func:`list_demo_files` function.
.. autofunction:: pbxplore.demo.list_demo_files
"""
import os
DEMO_DATA_PATH=os.path.abspath(os.path.dirname(__file__))
def list_demo_files():
"""
List the names of the bundled demo files
"""
return os.listdir(DEMO_DATA_PATH)
|
"""
Demonstration files --- :mod:`pbxplore.demo`
============================================
PBxplore bundles a set of demonstration files. This module ease the access to
these files.
The path to the demonstration files is stored in :const:`DEMO_DATA_PATH`. This
constant can be accessed as :const:`pbxplore.demo.DEMO_DATA_PATH`, or as
:const:`pbxplore.DEMO_DATA_PATH`.
A list of the available demonstration files is available with the
:func:`list_demo_files` function. The same list with absolute path instead of
file names is provided by :func:`list_demo_files_absolute`.
.. autofunction:: pbxplore.demo.list_demo_files
.. autofunction:: pbxplore.demo.list_demo_files_absolute
"""
import os
DEMO_DATA_PATH=os.path.abspath(os.path.dirname(__file__))
def list_demo_files():
"""
List the names of the bundled demo files
File names starting with _ or . are not listed. This allows to omit
__init__.py, and hiden files.
"""
return [demo_file for demo_file in os.listdir(DEMO_DATA_PATH)
if not demo_file[0] in '_.']
def list_demo_files_absolute():
"""
List the absolute path to the bundled demo files
File names starting with _ or . are not listed. This allows to omit
__init__.py, and hiden files.
"""
return [os.path.join(DEMO_DATA_PATH, demo_file) for demo_file
in list_demo_files()]
|
Add a function to list absolute path to demo files
|
Add a function to list absolute path to demo files
|
Python
|
mit
|
jbarnoud/PBxplore,jbarnoud/PBxplore,pierrepo/PBxplore,HubLot/PBxplore,pierrepo/PBxplore,HubLot/PBxplore
|
---
+++
@@ -10,9 +10,12 @@
:const:`pbxplore.DEMO_DATA_PATH`.
A list of the available demonstration files is available with the
-:func:`list_demo_files` function.
+:func:`list_demo_files` function. The same list with absolute path instead of
+file names is provided by :func:`list_demo_files_absolute`.
.. autofunction:: pbxplore.demo.list_demo_files
+
+.. autofunction:: pbxplore.demo.list_demo_files_absolute
"""
import os
@@ -23,5 +26,19 @@
def list_demo_files():
"""
List the names of the bundled demo files
+
+ File names starting with _ or . are not listed. This allows to omit
+ __init__.py, and hiden files.
"""
- return os.listdir(DEMO_DATA_PATH)
+ return [demo_file for demo_file in os.listdir(DEMO_DATA_PATH)
+ if not demo_file[0] in '_.']
+
+def list_demo_files_absolute():
+ """
+ List the absolute path to the bundled demo files
+
+ File names starting with _ or . are not listed. This allows to omit
+ __init__.py, and hiden files.
+ """
+ return [os.path.join(DEMO_DATA_PATH, demo_file) for demo_file
+ in list_demo_files()]
|
b7faf879e81df86e49ec47f1bcce1d6488f743b2
|
medical_patient_species/tests/test_medical_patient_species.py
|
medical_patient_species/tests/test_medical_patient_species.py
|
# -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp.tests.common import TransactionCase
class TestMedicalPatientSpecies(TransactionCase):
def setUp(self):
super(TestMedicalPatientSpecies, self).setUp()
self.human = self.env.ref('medical_patient_species.human')
self.dog = self.env.ref('medical_patient_species.dog')
def test_create_is_person(self):
''' Tests on creation if Human, is_person is True '''
self.assertTrue(
self.human.is_person, 'Should be True if Human'
)
def test_create_not_is_person(self):
''' Tests on creation if not Human, is_person is False '''
self.assertFalse(
self.dog.is_person, 'Should be False if not Human'
)
|
# -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp.tests.common import TransactionCase
from openerp.exceptions import Warning
class TestMedicalPatientSpecies(TransactionCase):
def setUp(self):
super(TestMedicalPatientSpecies, self).setUp()
self.human = self.env.ref('medical_patient_species.human')
self.dog = self.env.ref('medical_patient_species.dog')
def test_unlink_human(self):
''' Test raises Warning if unlinking human '''
with self.assertRaises(Warning):
self.human.unlink()
|
Remove tests for is_person (default is False, human set to True in xml). Re-add test ensuring warning raised if trying to unlink Human.
|
[FIX] medical_patient_species: Remove tests for is_person (default is False, human set to True in xml).
Re-add test ensuring warning raised if trying to unlink Human.
|
Python
|
agpl-3.0
|
laslabs/vertical-medical,laslabs/vertical-medical
|
---
+++
@@ -3,6 +3,7 @@
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp.tests.common import TransactionCase
+from openerp.exceptions import Warning
class TestMedicalPatientSpecies(TransactionCase):
@@ -12,14 +13,7 @@
self.human = self.env.ref('medical_patient_species.human')
self.dog = self.env.ref('medical_patient_species.dog')
- def test_create_is_person(self):
- ''' Tests on creation if Human, is_person is True '''
- self.assertTrue(
- self.human.is_person, 'Should be True if Human'
- )
-
- def test_create_not_is_person(self):
- ''' Tests on creation if not Human, is_person is False '''
- self.assertFalse(
- self.dog.is_person, 'Should be False if not Human'
- )
+ def test_unlink_human(self):
+ ''' Test raises Warning if unlinking human '''
+ with self.assertRaises(Warning):
+ self.human.unlink()
|
f9e1c2bd5976623bcebbb4b57fb011eb4d1737bc
|
support/appveyor-build.py
|
support/appveyor-build.py
|
#!/usr/bin/env python
# Build the project on AppVeyor.
import os
from download import Downloader
from subprocess import check_call
build = os.environ['BUILD']
cmake_command = ['cmake', '-DFMT_EXTRA_TESTS=ON', '-DCMAKE_BUILD_TYPE=' + os.environ['CONFIG']]
build_command = ['msbuild', '/m:4', '/p:Config=' + os.environ['CONFIG'], 'FORMAT.sln']
test_command = ['msbuild', 'RUN_TESTS.vcxproj']
if build == 'mingw':
# Install MinGW.
mingw_url = 'http://ufpr.dl.sourceforge.net/project/mingw-w64/' + \
'Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/' + \
'4.9.2/threads-win32/seh/x86_64-4.9.2-release-win32-seh-rt_v3-rev1.7z'
with Downloader().download(mingw_url) as f:
check_call(['7z', 'x', '-oC:\\', f])
# Remove path to Git bin directory from $PATH because it breaks MinGW config.
path = os.environ['PATH'].replace(r'C:\Program Files (x86)\Git\bin', '')
os.environ['PATH'] = r'C:\Program Files (x86)\MSBUILD\12.0\bin\;' + path + r';C:\mingw64\bin'
cmake_command.append('-GMinGW Makefiles')
build_command = ['mingw32-make', '-j4']
test_command = ['mingw32-make', 'test']
check_call(cmake_command)
check_call(build_command)
check_call(test_command)
|
#!/usr/bin/env python
# Build the project on AppVeyor.
import os
from subprocess import check_call
build = os.environ['BUILD']
config = os.environ['CONFIG']
cmake_command = ['cmake', '-DFMT_EXTRA_TESTS=ON', '-DCMAKE_BUILD_TYPE=' + config]
if build == 'mingw':
cmake_command.append('-GMinGW Makefiles')
build_command = ['mingw32-make', '-j4']
test_command = ['mingw32-make', 'test']
else:
build_command = ['msbuild', '/m:4', '/p:Config=' + config, 'FORMAT.sln']
test_command = ['msbuild', 'RUN_TESTS.vcxproj']
check_call(cmake_command)
check_call(build_command)
check_call(test_command)
|
Use preinstalled mingw on appveyor
|
Use preinstalled mingw on appveyor
|
Python
|
bsd-2-clause
|
lightslife/cppformat,blaquee/cppformat,mojoBrendan/fmt,alabuzhev/fmt,alabuzhev/fmt,wangshijin/cppformat,nelson4722/cppformat,cppformat/cppformat,cppformat/cppformat,alabuzhev/fmt,blaquee/cppformat,wangshijin/cppformat,Jopie64/cppformat,nelson4722/cppformat,lightslife/cppformat,cppformat/cppformat,seungrye/cppformat,Jopie64/cppformat,nelson4722/cppformat,dean0x7d/cppformat,seungrye/cppformat,lightslife/cppformat,dean0x7d/cppformat,mojoBrendan/fmt,wangshijin/cppformat,blaquee/cppformat,seungrye/cppformat,Jopie64/cppformat,mojoBrendan/fmt,dean0x7d/cppformat
|
---
+++
@@ -2,28 +2,18 @@
# Build the project on AppVeyor.
import os
-from download import Downloader
from subprocess import check_call
build = os.environ['BUILD']
-cmake_command = ['cmake', '-DFMT_EXTRA_TESTS=ON', '-DCMAKE_BUILD_TYPE=' + os.environ['CONFIG']]
-build_command = ['msbuild', '/m:4', '/p:Config=' + os.environ['CONFIG'], 'FORMAT.sln']
-test_command = ['msbuild', 'RUN_TESTS.vcxproj']
+config = os.environ['CONFIG']
+cmake_command = ['cmake', '-DFMT_EXTRA_TESTS=ON', '-DCMAKE_BUILD_TYPE=' + config]
if build == 'mingw':
- # Install MinGW.
- mingw_url = 'http://ufpr.dl.sourceforge.net/project/mingw-w64/' + \
- 'Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/' + \
- '4.9.2/threads-win32/seh/x86_64-4.9.2-release-win32-seh-rt_v3-rev1.7z'
- with Downloader().download(mingw_url) as f:
- check_call(['7z', 'x', '-oC:\\', f])
-
- # Remove path to Git bin directory from $PATH because it breaks MinGW config.
- path = os.environ['PATH'].replace(r'C:\Program Files (x86)\Git\bin', '')
-
- os.environ['PATH'] = r'C:\Program Files (x86)\MSBUILD\12.0\bin\;' + path + r';C:\mingw64\bin'
cmake_command.append('-GMinGW Makefiles')
build_command = ['mingw32-make', '-j4']
test_command = ['mingw32-make', 'test']
+else:
+ build_command = ['msbuild', '/m:4', '/p:Config=' + config, 'FORMAT.sln']
+ test_command = ['msbuild', 'RUN_TESTS.vcxproj']
check_call(cmake_command)
check_call(build_command)
|
c87b5f8392dc58d6fa1d5398245b4ffe9edb19c8
|
praw/models/mod_action.py
|
praw/models/mod_action.py
|
"""Provide the ModAction class."""
from typing import TYPE_CHECKING
from .base import PRAWBase
if TYPE_CHECKING: # pragma: no cover
from ... import praw
class ModAction(PRAWBase):
"""Represent a moderator action."""
@property
def mod(self) -> "praw.models.Redditor":
"""Return the :class:`.Redditor` who the action was issued by."""
return self._reddit.redditor(self._mod) # pylint: disable=no-member
@mod.setter
def mod(self, value: "praw.models.Redditor"):
self._mod = value # pylint: disable=attribute-defined-outside-init
|
"""Provide the ModAction class."""
from typing import TYPE_CHECKING, Union
from .base import PRAWBase
if TYPE_CHECKING: # pragma: no cover
from ... import praw
class ModAction(PRAWBase):
"""Represent a moderator action."""
@property
def mod(self) -> "praw.models.Redditor":
"""Return the :class:`.Redditor` who the action was issued by."""
return self._reddit.redditor(self._mod) # pylint: disable=no-member
@mod.setter
def mod(self, value: Union[str, "praw.models.Redditor"]):
self._mod = value # pylint: disable=attribute-defined-outside-init
|
Add str as a type for mod setter
|
Add str as a type for mod setter
|
Python
|
bsd-2-clause
|
praw-dev/praw,praw-dev/praw
|
---
+++
@@ -1,5 +1,5 @@
"""Provide the ModAction class."""
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Union
from .base import PRAWBase
@@ -16,5 +16,5 @@
return self._reddit.redditor(self._mod) # pylint: disable=no-member
@mod.setter
- def mod(self, value: "praw.models.Redditor"):
+ def mod(self, value: Union[str, "praw.models.Redditor"]):
self._mod = value # pylint: disable=attribute-defined-outside-init
|
b8e085c538b9eda06a831c78f55219ac4612a5da
|
model.py
|
model.py
|
import collections
LearningObject = collections.namedtuple(
'LearningObject',
['text', 'image'])
class Model(object):
def __init__(self, name):
self._name = name
self._objs = []
def add_object(self, text, image):
self._objs.append(LearningObject(text, image))
def write(self):
path = 'xml/LearningObjectsModularList-%s.xml' % self._name
with open(path, 'w') as manifest_file:
manifest_file.write('<Modules>\n')
manifest_file.write(' <Module>\n')
manifest_file.write(' <ModuleName>%s</ModuleName>\n' % self._name)
for o in self._objs:
manifest_file.write(
' <LearningObject>\n'
' <TextToDisplay>%s</TextToDisplay>\n'
' <ImageToDisplay>%s</ImageToDisplay>\n'
' </LearningObject>\n' % (o.text, o.image))
manifest_file.write(' </Module>\n')
manifest_file.write('</Modules>')
|
import os
import collections
LearningObject = collections.namedtuple(
'LearningObject',
['text', 'image'])
class Model(object):
def __init__(self, name):
self._name = name
self._objs = []
def add_object(self, text, image):
self._objs.append(LearningObject(text, image))
def write(self):
if not os.path.exists('xml'): os.mkdir('xml')
path = 'xml/LearningObjectsModularList-%s.xml' % self._name
with open(path, 'w') as manifest_file:
manifest_file.write('<Modules>\n')
manifest_file.write(' <Module>\n')
manifest_file.write(' <ModuleName>%s</ModuleName>\n' % self._name)
for o in self._objs:
manifest_file.write(
' <LearningObject>\n'
' <TextToDisplay>%s</TextToDisplay>\n'
' <ImageToDisplay>%s</ImageToDisplay>\n'
' </LearningObject>\n' % (o.text, o.image))
manifest_file.write(' </Module>\n')
manifest_file.write('</Modules>')
|
Make directory if it doesnt exist
|
Make directory if it doesnt exist
|
Python
|
apache-2.0
|
faskiri/google-drive-extract-images
|
---
+++
@@ -1,3 +1,5 @@
+import os
+
import collections
LearningObject = collections.namedtuple(
@@ -12,7 +14,9 @@
def add_object(self, text, image):
self._objs.append(LearningObject(text, image))
- def write(self):
+ def write(self):
+ if not os.path.exists('xml'): os.mkdir('xml')
+
path = 'xml/LearningObjectsModularList-%s.xml' % self._name
with open(path, 'w') as manifest_file:
manifest_file.write('<Modules>\n')
|
dd0405965f816a2a71bfb6d7a3f939691a6ab6d8
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
print "AdminConfig.list( dsid ): "
AdminConfig.showAttribute(dsid,"propertySet")
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dsidlist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dsidlist.append(str(db).replace('"',''))
n += 1
dsidlist.sort()
for dsid in dsidlist:
propertySet = AdminConfig.showAttribute(dsid,"propertySet")
propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
|
Create documentation of DataSource Settings
|
8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
---
+++
@@ -33,5 +33,5 @@
dsidlist.sort()
for dsid in dsidlist:
- print "AdminConfig.list( dsid ): "
- AdminConfig.showAttribute(dsid,"propertySet")
+ propertySet = AdminConfig.showAttribute(dsid,"propertySet")
+ propertyList = AdminConfig.list("J2EEResourceProperty", propertySet).splitlines()
|
ba3655a8771978edcf73083446c47adafc677afc
|
bayesian_jobs/handlers/clean_postgres.py
|
bayesian_jobs/handlers/clean_postgres.py
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
results = self.postgres.session.query(WorkerResult).join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None)
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
results = self.postgres.session.query(WorkerResult).join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None)
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
|
Clean PostgreSQL - commit after each change not to keep state in memory
|
Clean PostgreSQL - commit after each change not to keep state in memory
|
Python
|
apache-2.0
|
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
|
---
+++
@@ -31,4 +31,4 @@
entry.task_result = None
entry.error = True
- self.postgres.session.commit()
+ self.postgres.session.commit()
|
bdd2570f9ba8963edb6fa1a57b3f0ad5b1703a13
|
check_env.py
|
check_env.py
|
""" Run this file to check your python installation.
"""
from os.path import dirname, join
HERE = dirname(__file__)
def test_import_pandas():
import pandas
def test_pandas_version():
import pandas
version_found = pandas.__version__.split(".")
version_found = tuple(int(num) for num in version_found)
assert version_found > (0, 15)
def test_import_numpy():
import numpy
def test_import_matplotlib():
import matplotlib.pyplot as plt
plt.figure
plt.plot
plt.legend
plt.imshow
def test_import_statsmodels():
import statsmodels as sm
from statsmodels.formula.api import ols
from statsmodels.tsa.ar_model import AR
def test_read_html():
import pandas
pandas.read_html(join(HERE, "demos", "climate_timeseries", "data",
"sea_levels", "Obtaining Tide Gauge Data.html"))
def test_scrape_web():
import pandas as pd
pd.read_html("http://en.wikipedia.org/wiki/World_population")
if __name__ == "__main__":
import nose
nose.run(defaultTest=__name__)
|
""" Run this file to check your python installation.
"""
from os.path import dirname, join
HERE = dirname(__file__)
def test_import_pandas():
import pandas
def test_pandas_version():
import pandas
version_found = pandas.__version__.split(".")
version_found = tuple(int(num) for num in version_found)
assert version_found > (0, 15)
def test_import_numpy():
import numpy
def test_import_matplotlib():
import matplotlib.pyplot as plt
plt.figure
plt.plot
plt.legend
plt.imshow
def test_import_statsmodels():
import statsmodels as sm
from statsmodels.formula.api import ols
from statsmodels.tsa.ar_model import AR
def test_read_html():
import pandas
pandas.read_html(join(HERE, "climate_timeseries", "data",
"sea_levels", "Obtaining Tide Gauge Data.html"))
def test_scrape_web():
import pandas as pd
pd.read_html("http://en.wikipedia.org/wiki/World_population")
if __name__ == "__main__":
import nose
nose.run(defaultTest=__name__)
|
Fix path to test file.
|
Fix path to test file.
|
Python
|
mit
|
jonathanrocher/pandas_tutorial,jonathanrocher/pandas_tutorial,jonathanrocher/pandas_tutorial
|
---
+++
@@ -35,7 +35,7 @@
def test_read_html():
import pandas
- pandas.read_html(join(HERE, "demos", "climate_timeseries", "data",
+ pandas.read_html(join(HERE, "climate_timeseries", "data",
"sea_levels", "Obtaining Tide Gauge Data.html"))
|
cff7bb0fda7e126ce65701231cab0e67a5a2794c
|
endpoints.py
|
endpoints.py
|
import requests
class AlgoliaEndpoint(object):
"""Class used to call the Algolia API and parse the response."""
URL = "http://hn.algolia.com/api/v1/search_by_date"
@staticmethod
def get(tag, since, until=None, page=0):
"""Send a GET request to the endpoint.
Since Algolia only returns JSON, parse it into a dict.
Params:
tag: Can be "story" or "comment".
since: timestamp representing how old the news should be.
Optional params:
until: timestamp representing how new the news should be.
page: The number of the page to get.
Returns:
A python dict representing the response.
Raises:
requests.exceptions.RequestException.
"""
numericFilters = ["created_at_i<%d" % since]
if until is not None:
numericFilters += ["created_at_i>%d" % until]
params = {
"numericFilters": ",".join(numericFilters),
"tags": tag,
"page": page
}
url = AlgoliaEndpoint.URL
url += "?" + "&".join(["%s=%s" for k,v in params.items()])
response = requests.get(url)
return response.json()
|
import requests
class AlgoliaEndpoint(object):
"""Class used to call the Algolia API and parse the response."""
URL = "http://hn.algolia.com/api/v1/search_by_date"
@staticmethod
def get(tag, since, until=None, page=0):
"""Send a GET request to the endpoint.
Since Algolia only returns JSON, parse it into a dict.
See http://hn.algolia.com/api for more details.
Params:
tag: Can be "story" or "comment".
since: timestamp representing how old the news should be.
Optional params:
until: timestamp representing how new the news should be.
page: The number of the page to get.
Returns:
A python dict representing the response.
Raises:
requests.exceptions.RequestException.
"""
numericFilters = ["created_at_i<%d" % since]
if until is not None:
numericFilters += ["created_at_i>%d" % until]
params = {
"numericFilters": ",".join(numericFilters),
"tags": tag,
"page": page
}
url = AlgoliaEndpoint.URL
url += "?" + "&".join(["%s=%s" for k,v in params.items()])
response = requests.get(url)
return response.json()
|
Add Algolia API website in docstring.
|
Add Algolia API website in docstring.
|
Python
|
bsd-2-clause
|
NiGhTTraX/hackernews-scraper
|
---
+++
@@ -11,6 +11,8 @@
"""Send a GET request to the endpoint.
Since Algolia only returns JSON, parse it into a dict.
+
+ See http://hn.algolia.com/api for more details.
Params:
tag: Can be "story" or "comment".
|
2718256bfbf57eba36c3f083dace32afbc101fd3
|
python2/runner/writeln_decorator.py
|
python2/runner/writeln_decorator.py
|
#!/usr/bin/env python
# encoding: utf-8
import sys
import os
# Taken from legacy python unittest
class WritelnDecorator:
"""Used to decorate file-like objects with a handy 'writeln' method"""
def __init__(self, stream):
self.stream = stream
def __getattr__(self, attr):
return getattr(self.stream,attr)
def writeln(self, arg=None):
if arg: self.write(arg)
self.write('\n') # text-mode streams translate to \r\n if needed
|
#!/usr/bin/env python
# encoding: utf-8
import sys
import os
# Taken from legacy python unittest
class WritelnDecorator:
"""Used to decorate file-like objects with a handy 'writeln' method"""
def __init__(self,stream):
self.stream = stream
def __getattr__(self, attr):
return getattr(self.stream,attr)
def writeln(self, arg=None):
if arg: self.write(arg)
self.write('\n') # text-mode streams translate to \r\n if needed
|
Revert "PEP8: delete unused import"
|
Revert "PEP8: delete unused import"
This reverts commit ef48c713ec8686d1be3a66d8f41c498ae1361708.
|
Python
|
mit
|
kjc/python_koans,erikld/Bobo,haroldtreen/python_koans,welenofsky/python_koans,EavesofIT/python_koans,bordeltabernacle/python_koans,gregmalcolm/python_koans,bohdan7/python_koans,Sam-Rowe/python-koans,Sam-Rowe/python-koans,PaulFranklin/python_koans,kimegitee/python-koans,kjc/python_koans,rameshugar/koans,PaulFranklin/python_koans,haroldtreen/python_koans,kimegitee/python-koans,kernbeisser/python_koans,gregmalcolm/python_koans,EavesofIT/python_koans,kernbeisser/python_koans,garethsaxby/python_koans,gregkorte/Python-Koans,bordeltabernacle/python_koans,gregkorte/Python-Koans,rameshugar/koans,bohdan7/python_koans,welenofsky/python_koans,erikld/Bobo,garethsaxby/python_koans
|
---
+++
@@ -4,11 +4,10 @@
import sys
import os
-
# Taken from legacy python unittest
class WritelnDecorator:
"""Used to decorate file-like objects with a handy 'writeln' method"""
- def __init__(self, stream):
+ def __init__(self,stream):
self.stream = stream
def __getattr__(self, attr):
|
05e9c3e9c58732e68eacc0462f949c2525d670fe
|
tests/contrib/test_sqlalchemy_handler.py
|
tests/contrib/test_sqlalchemy_handler.py
|
# -*- coding: utf-8 -*-
import unittest
from flask.ext.sqlalchemy import SQLAlchemy
import flask_featureflags as feature_flags
from flask_featureflags.contrib.sqlalchemy import SQLAlchemyFeatureFlags
from tests.fixtures import app, feature_setup
db = SQLAlchemy(app)
SQLAlchemyHandler = SQLAlchemyFeatureFlags(db)
class SQLAlchemyFeatureFlagTest(unittest.TestCase):
@classmethod
def setupClass(cls):
feature_setup.handlers = [SQLAlchemyHandler]
@classmethod
def tearDownClass(cls):
feature_setup.clear_handlers()
def setUp(self):
self.app_ctx = app.app_context()
self.app_ctx.push()
db.create_all()
m1 = SQLAlchemyHandler.model(feature='active', is_active=True)
m2 = SQLAlchemyHandler.model(feature='inactive')
db.session.add_all([m1, m2])
db.session.commit()
def tearDown(self):
db.session.close()
db.drop_all()
self.app_ctx.pop()
def test_flag_active(self):
self.assertTrue(feature_flags.is_active('active'))
def test_flag_inactive(self):
self.assertFalse(feature_flags.is_active('inactive'))
def test_flag_not_found(self):
self.assertFalse(feature_flags.is_active('not_found'))
def test_flag_not_found_raise_handler_exception(self):
self.assertRaises(feature_flags.NoFeatureFlagFound,
SQLAlchemyHandler, 'not_found')
|
# -*- coding: utf-8 -*-
import unittest
from flask.ext.sqlalchemy import SQLAlchemy
import flask_featureflags as feature_flags
from flask_featureflags.contrib.sqlalchemy import SQLAlchemyFeatureFlags
from tests.fixtures import app, feature_setup
db = SQLAlchemy(app)
SQLAlchemyHandler = SQLAlchemyFeatureFlags(db)
class SQLAlchemyFeatureFlagTest(unittest.TestCase):
@classmethod
def setupClass(cls):
feature_setup.add_handler(SQLAlchemyHandler)
@classmethod
def tearDownClass(cls):
feature_setup.clear_handlers()
def setUp(self):
self.app_ctx = app.app_context()
self.app_ctx.push()
db.create_all()
m1 = SQLAlchemyHandler.model(feature='active', is_active=True)
m2 = SQLAlchemyHandler.model(feature='inactive')
db.session.add_all([m1, m2])
db.session.commit()
def tearDown(self):
db.session.close()
db.drop_all()
self.app_ctx.pop()
def test_flag_active(self):
self.assertTrue(feature_flags.is_active('active'))
def test_flag_inactive(self):
self.assertFalse(feature_flags.is_active('inactive'))
def test_flag_not_found(self):
self.assertFalse(feature_flags.is_active('not_found'))
def test_flag_not_found_raise_handler_exception(self):
self.assertRaises(feature_flags.NoFeatureFlagFound,
SQLAlchemyHandler, 'not_found')
|
Use add_handler instead of set handlers as a list.
|
Use add_handler instead of set handlers as a list.
|
Python
|
apache-2.0
|
jskulski/Flask-FeatureFlags,iromli/Flask-FeatureFlags,trustrachel/Flask-FeatureFlags
|
---
+++
@@ -16,7 +16,7 @@
@classmethod
def setupClass(cls):
- feature_setup.handlers = [SQLAlchemyHandler]
+ feature_setup.add_handler(SQLAlchemyHandler)
@classmethod
def tearDownClass(cls):
|
0e9acfe35396582f95c22994e061be871fdaf865
|
tests/test_datapackage.py
|
tests/test_datapackage.py
|
import pytest
import datapackage
class TestDataPackage(object):
def test_schema(self):
descriptor = {}
schema = {'foo': 'bar'}
dp = datapackage.DataPackage(descriptor, schema=schema)
assert dp.schema.to_dict() == schema
def test_datapackage_attributes(self):
dp = datapackage.DataPackage()
dp.foo = 'bar'
dp.bar = 'baz'
assert dp.attributes == {'foo': 'bar', 'bar': 'baz'}
def test_validate(self):
descriptor = {
'name': 'foo',
}
schema = {
'properties': {
'name': {},
},
'required': ['name'],
}
dp = datapackage.DataPackage(descriptor, schema)
dp.validate()
def test_validate_raises_validation_error_if_invalid(self):
schema = {
'properties': {
'name': {},
},
'required': ['name'],
}
dp = datapackage.DataPackage(schema=schema)
with pytest.raises(datapackage.exceptions.ValidationError):
dp.validate()
|
import pytest
import datapackage
class TestDataPackage(object):
def test_init_uses_base_schema_by_default(self):
dp = datapackage.DataPackage()
assert dp.schema.title == 'DataPackage'
def test_schema(self):
descriptor = {}
schema = {'foo': 'bar'}
dp = datapackage.DataPackage(descriptor, schema=schema)
assert dp.schema.to_dict() == schema
def test_datapackage_attributes(self):
dp = datapackage.DataPackage()
dp.foo = 'bar'
dp.bar = 'baz'
assert dp.attributes == {'foo': 'bar', 'bar': 'baz'}
def test_validate(self):
descriptor = {
'name': 'foo',
}
schema = {
'properties': {
'name': {},
},
'required': ['name'],
}
dp = datapackage.DataPackage(descriptor, schema)
dp.validate()
def test_validate_raises_validation_error_if_invalid(self):
schema = {
'properties': {
'name': {},
},
'required': ['name'],
}
dp = datapackage.DataPackage(schema=schema)
with pytest.raises(datapackage.exceptions.ValidationError):
dp.validate()
|
Add tests to ensure DataPackage uses base schema by default
|
Add tests to ensure DataPackage uses base schema by default
|
Python
|
mit
|
okfn/datapackage-model-py,sirex/datapackage-py,sirex/datapackage-py,okfn/datapackage-model-py,okfn/datapackage-py,datapackages/datapackage-py,datapackages/datapackage-py,okfn/datapackage-py
|
---
+++
@@ -3,6 +3,10 @@
class TestDataPackage(object):
+ def test_init_uses_base_schema_by_default(self):
+ dp = datapackage.DataPackage()
+ assert dp.schema.title == 'DataPackage'
+
def test_schema(self):
descriptor = {}
schema = {'foo': 'bar'}
|
cc17f806e3fcbc6974a9ee13be58585e681cc59a
|
jose/backends/__init__.py
|
jose/backends/__init__.py
|
try:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey # noqa: F401
except ImportError:
try:
from jose.backends.pycrypto_backend import RSAKey # noqa: F401
except ImportError:
from jose.backends.rsa_backend import RSAKey # noqa: F401
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey # noqa: F401
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey # noqa: F401
|
try:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey # noqa: F401
except ImportError:
try:
from jose.backends.pycrypto_backend import RSAKey # noqa: F401
# time.clock was deprecated in python 3.3 in favor of time.perf_counter
# and removed in python 3.8. pycrypto was never updated for this. If
# time has no clock attribute, let it use perf_counter instead to work
# in 3.8+
# noinspection PyUnresolvedReferences
import time
if not hasattr(time, "clock"):
time.clock = time.perf_counter
except ImportError:
from jose.backends.rsa_backend import RSAKey # noqa: F401
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey # noqa: F401
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey # noqa: F401
|
Add fix for time.clock removal in 3.8 for pycrypto backend
|
Add fix for time.clock removal in 3.8 for pycrypto backend
|
Python
|
mit
|
mpdavis/python-jose
|
---
+++
@@ -4,6 +4,16 @@
except ImportError:
try:
from jose.backends.pycrypto_backend import RSAKey # noqa: F401
+
+ # time.clock was deprecated in python 3.3 in favor of time.perf_counter
+ # and removed in python 3.8. pycrypto was never updated for this. If
+ # time has no clock attribute, let it use perf_counter instead to work
+ # in 3.8+
+ # noinspection PyUnresolvedReferences
+ import time
+ if not hasattr(time, "clock"):
+ time.clock = time.perf_counter
+
except ImportError:
from jose.backends.rsa_backend import RSAKey # noqa: F401
|
abc95f3a10cd27ec67e982b187f7948d0dc83fe3
|
corgi/sql.py
|
corgi/sql.py
|
from six.moves import configparser as CP
from sqlalchemy.engine.url import URL
from sqlalchemy.engine import create_engine
import os
import pandas as pd
def get_odbc_engine(name, odbc_filename='/etc/odbc.ini', database=None):
"""
Looks up the connection details in an odbc file and returns a SQLAlchemy engine initialized with those details.
"""
parser = CP.ConfigParser()
parser.read(odbc_filename)
cfg_dict = dict(parser.items(name))
if database:
cfg_dict['database'] = database
connection_href = str(URL(**cfg_dict))
engine = create_engine(connection_href)
return engine
def cached_read_sql(name, engine, sql_loc='sql', out_data_loc='data', refresh=False):
sql_fname = '%s/%s.sql' % (sql_loc, name)
data_fname = '%s/%s.csv' % (out_data_loc, name)
if os.path.isfile(data_fname):
return pd.read_csv(data_fname)
with open(sql_fname) as f:
df = pd.read_sql(f.read(), engine)
df.to_csv(data_fname, index=False)
return df
|
import os
from pathlib import Path
from six.moves import configparser as CP
import pandas as pd
from sqlalchemy.engine import create_engine
from sqlalchemy.engine.url import URL
home = str(Path.home())
def get_odbc_engine(name, odbc_filename=None, database=None):
"""
Looks up the connection details in an odbc file and returns a SQLAlchemy engine initialized with those details.
"""
possible_locations = []
if odbc_filename:
possible_locations += [odbc_filename]
possible_locations += [
'/etc/odbc.ini',
'%s/odbc.ini' % home,
]
odbc_loc = None
for loc in possible_locations:
if os.path.exists(loc):
odbc_loc = loc
break
if not odbc_loc:
raise Exception('Could not find an odbc config file. Checked: \n%s' % "\n".join(possible_locations))
parser = CP.ConfigParser()
parser.read(odbc_loc)
cfg_dict = dict(parser.items(name))
if database:
cfg_dict['database'] = database
connection_href = str(URL(**cfg_dict))
engine = create_engine(connection_href)
return engine
def cached_read_sql(name, engine, sql_loc='sql', out_data_loc='data', refresh=False):
sql_fname = '%s/%s.sql' % (sql_loc, name)
data_fname = '%s/%s.csv' % (out_data_loc, name)
if os.path.isfile(data_fname):
return pd.read_csv(data_fname)
with open(sql_fname) as f:
df = pd.read_sql(f.read(), engine)
df.to_csv(data_fname, index=False)
return df
|
Set the get_odbc_engine function to check etc, then user home for odbc file by default
|
Set the get_odbc_engine function to check etc, then user home for odbc file by default
|
Python
|
mit
|
log0ymxm/corgi
|
---
+++
@@ -1,16 +1,36 @@
+import os
+from pathlib import Path
+
from six.moves import configparser as CP
+
+import pandas as pd
+from sqlalchemy.engine import create_engine
from sqlalchemy.engine.url import URL
-from sqlalchemy.engine import create_engine
-import os
-import pandas as pd
-def get_odbc_engine(name, odbc_filename='/etc/odbc.ini', database=None):
+home = str(Path.home())
+
+def get_odbc_engine(name, odbc_filename=None, database=None):
"""
Looks up the connection details in an odbc file and returns a SQLAlchemy engine initialized with those details.
"""
+ possible_locations = []
+ if odbc_filename:
+ possible_locations += [odbc_filename]
+ possible_locations += [
+ '/etc/odbc.ini',
+ '%s/odbc.ini' % home,
+ ]
+
+ odbc_loc = None
+ for loc in possible_locations:
+ if os.path.exists(loc):
+ odbc_loc = loc
+ break
+ if not odbc_loc:
+ raise Exception('Could not find an odbc config file. Checked: \n%s' % "\n".join(possible_locations))
parser = CP.ConfigParser()
- parser.read(odbc_filename)
+ parser.read(odbc_loc)
cfg_dict = dict(parser.items(name))
|
a46bc83eb16888e374f5f24080581818617539a2
|
src/cards.py
|
src/cards.py
|
from random import seed as srand, randint
from time import time
srand(time())
class Card:
types = ("def", "atk")
limits = {"def": (1, 25), "atk": (1, 40)}
def __init__(self, type = None, value = None):
if type:
if not type in types:
print("ERROR: Invalid card type")
return False
else:
# Randomize card type
type = Card.types[randint(len(Card.types))]
self.type = type
self.value = randint(Card.limits[type][0], Card.limits[type][1])
|
from random import seed as srand, randint
from time import time
srand(time())
class Card:
card_types = ("def", "atk")
card_limits = {"def": (1, 25), "atk": (1, 40)}
def __init__(self, card_type = None, card_value = None):
if card_type:
if not card_type in card_types:
print("ERROR: Invalid card type")
return False
else:
# Randomize card type
card_type = Card.card_types[randint(len(Card.card_types))]
self.card_type = card_type
self.value = randint(card_limits[card_type][0],
card_limits[card_type][1])
|
Change variable names to remove conflict with standard functions
|
Change variable names to remove conflict with standard functions
|
Python
|
mit
|
TheUnderscores/card-fight-thingy
|
---
+++
@@ -4,17 +4,18 @@
srand(time())
class Card:
- types = ("def", "atk")
- limits = {"def": (1, 25), "atk": (1, 40)}
+ card_types = ("def", "atk")
+ card_limits = {"def": (1, 25), "atk": (1, 40)}
- def __init__(self, type = None, value = None):
- if type:
- if not type in types:
+ def __init__(self, card_type = None, card_value = None):
+ if card_type:
+ if not card_type in card_types:
print("ERROR: Invalid card type")
return False
else:
# Randomize card type
- type = Card.types[randint(len(Card.types))]
+ card_type = Card.card_types[randint(len(Card.card_types))]
- self.type = type
- self.value = randint(Card.limits[type][0], Card.limits[type][1])
+ self.card_type = card_type
+ self.value = randint(card_limits[card_type][0],
+ card_limits[card_type][1])
|
063a655327cf0872e01170653604db6901d5aacd
|
pagebits/managers.py
|
pagebits/managers.py
|
from django.db import models
from django.core.cache import cache
from django.conf import settings
from .utils import bitgroup_cache_key
class BitGroupManager(models.Manager):
def get_group(self, slug):
""" Retrieve a group by slug, with caching """
key = bitgroup_cache_key(slug)
cached_group = cache.get(key)
if not cached_group:
cached_group = self.get_queryset().select_related(
'bits',
).prefetch_related('bits__data').get(slug=slug)
timeout = getattr(settings, 'PAGEBITS_CACHE_TIMEOUT', 3600)
cache.set(key, cached_group, int(timeout))
return cached_group
|
from django.db import models
from django.core.cache import cache
from django.conf import settings
from .utils import bitgroup_cache_key
class BitGroupManager(models.Manager):
def get_group(self, slug):
""" Retrieve a group by slug, with caching """
key = bitgroup_cache_key(slug)
cached_group = cache.get(key)
if not cached_group:
cached_group = self.get_queryset() \
.prefetch_related('bits__data').get(slug=slug)
timeout = getattr(settings, 'PAGEBITS_CACHE_TIMEOUT', 3600)
cache.set(key, cached_group, int(timeout))
return cached_group
|
Remove select related. Django 1.8 requires existing fields instead of ignoring missing ones.
|
Remove select related. Django 1.8 requires existing fields instead of ignoring missing ones.
|
Python
|
bsd-3-clause
|
nngroup/django-pagebits,nngroup/django-pagebits
|
---
+++
@@ -13,9 +13,8 @@
cached_group = cache.get(key)
if not cached_group:
- cached_group = self.get_queryset().select_related(
- 'bits',
- ).prefetch_related('bits__data').get(slug=slug)
+ cached_group = self.get_queryset() \
+ .prefetch_related('bits__data').get(slug=slug)
timeout = getattr(settings, 'PAGEBITS_CACHE_TIMEOUT', 3600)
|
3c1f2c46485aee91dbf4c61b7b096c2cc4b28c06
|
kcdc3/apps/pinata/urls.py
|
kcdc3/apps/pinata/urls.py
|
from django.conf.urls import patterns, include, url
from models import Page
urlpatterns = patterns('kcdc3.apps.pinata.views',
url(r'^$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/$', 'page_view'),
)
|
from django.conf.urls import patterns, include, url
from models import Page
urlpatterns = patterns('kcdc3.apps.pinata.views',
url(r'^$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/[0-9a-zA-Z_-]+/$', 'page_view'),
# Surely there's a better way to handle paths that contain several slashes
)
|
Allow three-deep paths in Pinata
|
Allow three-deep paths in Pinata
|
Python
|
mit
|
knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3,knowledgecommonsdc/kcdc3
|
---
+++
@@ -5,5 +5,7 @@
url(r'^$', 'page_view'),
url(r'^[0-9a-zA-Z_-]+/$', 'page_view'),
+ url(r'^[0-9a-zA-Z_-]+/[0-9a-zA-Z_-]+/$', 'page_view'),
+ # Surely there's a better way to handle paths that contain several slashes
)
|
65d3e588606f533b2d2934bf52439e586a01f2b4
|
pdc/settings_test.py
|
pdc/settings_test.py
|
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
"""
Extra Django settings for test environment of pdc project.
"""
from settings import *
# Database settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.sqlite3',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# disable PERMISSION while testing
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'pdc.apps.auth.authentication.TokenAuthenticationWithChangeSet',
'rest_framework.authentication.SessionAuthentication',
),
# 'DEFAULT_PERMISSION_CLASSES': [
# 'rest_framework.permissions.DjangoModelPermissions'
# ],
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',
'pdc.apps.utils.utils.RelatedNestedOrderingFilter'),
'DEFAULT_METADATA_CLASS': 'contrib.bulk_operations.metadata.BulkMetadata',
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'pdc.apps.common.renderers.ReadOnlyBrowsableAPIRenderer',
),
'EXCEPTION_HANDLER': 'pdc.apps.common.handlers.exception_handler',
'DEFAULT_PAGINATION_CLASS': 'pdc.apps.common.pagination.AutoDetectedPageNumberPagination',
'NON_FIELD_ERRORS_KEY': 'detail',
}
COMPONENT_BRANCH_NAME_BLACKLIST_REGEX = r'^epel\d+$'
|
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
"""
Extra Django settings for test environment of pdc project.
"""
from settings import *
# Database settings
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.sqlite3',
'TEST': {'NAME': 'test.sqlite3'},
}
}
# disable PERMISSION while testing
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'pdc.apps.auth.authentication.TokenAuthenticationWithChangeSet',
'rest_framework.authentication.SessionAuthentication',
),
# 'DEFAULT_PERMISSION_CLASSES': [
# 'rest_framework.permissions.DjangoModelPermissions'
# ],
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',
'pdc.apps.utils.utils.RelatedNestedOrderingFilter'),
'DEFAULT_METADATA_CLASS': 'contrib.bulk_operations.metadata.BulkMetadata',
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'pdc.apps.common.renderers.ReadOnlyBrowsableAPIRenderer',
),
'EXCEPTION_HANDLER': 'pdc.apps.common.handlers.exception_handler',
'DEFAULT_PAGINATION_CLASS': 'pdc.apps.common.pagination.AutoDetectedPageNumberPagination',
'NON_FIELD_ERRORS_KEY': 'detail',
}
COMPONENT_BRANCH_NAME_BLACKLIST_REGEX = r'^epel\d+$'
|
Use persistent test DB (--keepdb can skip migrations)
|
Use persistent test DB (--keepdb can skip migrations)
By default test database is in-memory (':memory:'), i.e. not saved on
disk, making it impossible to skip migrations to quickly re-run tests
with '--keepdb' argument.
|
Python
|
mit
|
release-engineering/product-definition-center,release-engineering/product-definition-center,product-definition-center/product-definition-center,product-definition-center/product-definition-center,product-definition-center/product-definition-center,product-definition-center/product-definition-center,release-engineering/product-definition-center,release-engineering/product-definition-center
|
---
+++
@@ -15,10 +15,7 @@
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.sqlite3',
- 'USER': '',
- 'PASSWORD': '',
- 'HOST': '',
- 'PORT': '',
+ 'TEST': {'NAME': 'test.sqlite3'},
}
}
|
0610628771df849119e6ed316dfa0f8107d8fe6e
|
src/WaveBlocksND/Utils.py
|
src/WaveBlocksND/Utils.py
|
"""The WaveBlocks Project
Various small utility functions.
@author: R. Bourquin
@copyright: Copyright (C) 2012 R. Bourquin
@license: Modified BSD License
"""
from numpy import squeeze, asarray
def meshgrid_nd(arrays):
"""Like 'meshgrid()' but for arbitrary number of dimensions.
"""
arrays = tuple(map(squeeze, arrays))
if not len([ None for a in arrays if a.ndim != 1 ]) == 0:
raise ValueError("Arrays must be 1-dimensional")
# TODO: Handle one-element arrays!
# The dimension
D = len(arrays)
# The number of elements in each array
nelements = map(len, arrays)
result = []
for d, a in enumerate(arrays):
# The new shape
shape = D * [1]
shape[d] = nelements[d]
# Reshape
A = asarray(a).reshape(shape)
# And repeat as many times as necessary
for ax, n in enumerate(nelements):
if not ax == d:
A = A.repeat(n, axis=ax)
result.append(A)
return tuple(result)
|
"""The WaveBlocks Project
Various small utility functions.
@author: R. Bourquin
@copyright: Copyright (C) 2012, 2013 R. Bourquin
@license: Modified BSD License
"""
from numpy import squeeze, asarray, atleast_1d
def meshgrid_nd(arrays):
"""Like 'meshgrid()' but for arbitrary number of dimensions.
:param arrays: A list of arrays to form the tensor grid.
All arrays have to be 1 oder 0 dimensional.
:return: The full tensor product mesh grid.
"""
arrays = map(squeeze, arrays)
arrays = tuple(map(atleast_1d, arrays))
if not len([ None for a in arrays if a.ndim != 1 ]) == 0:
raise ValueError("Arrays must be 1-dimensional")
# The dimension
D = len(arrays)
# The number of elements in each array
nelements = map(len, arrays)
result = []
for d, a in enumerate(arrays):
# The new shape
shape = D * [1]
shape[d] = nelements[d]
# Reshape
A = asarray(a).reshape(shape)
# And repeat as many times as necessary
for ax, n in enumerate(nelements):
if not ax == d:
A = A.repeat(n, axis=ax)
result.append(A)
return tuple(result)
|
Allow 0-dimensional arrays for tensor meshgrids
|
Allow 0-dimensional arrays for tensor meshgrids
|
Python
|
bsd-3-clause
|
WaveBlocks/WaveBlocksND,WaveBlocks/WaveBlocksND
|
---
+++
@@ -3,22 +3,25 @@
Various small utility functions.
@author: R. Bourquin
-@copyright: Copyright (C) 2012 R. Bourquin
+@copyright: Copyright (C) 2012, 2013 R. Bourquin
@license: Modified BSD License
"""
-from numpy import squeeze, asarray
+from numpy import squeeze, asarray, atleast_1d
def meshgrid_nd(arrays):
"""Like 'meshgrid()' but for arbitrary number of dimensions.
+
+ :param arrays: A list of arrays to form the tensor grid.
+ All arrays have to be 1 oder 0 dimensional.
+ :return: The full tensor product mesh grid.
"""
- arrays = tuple(map(squeeze, arrays))
+ arrays = map(squeeze, arrays)
+ arrays = tuple(map(atleast_1d, arrays))
if not len([ None for a in arrays if a.ndim != 1 ]) == 0:
raise ValueError("Arrays must be 1-dimensional")
-
- # TODO: Handle one-element arrays!
# The dimension
D = len(arrays)
|
fb9999b8dfcbc67da3a14ecbfed8fcd5676c0ea3
|
akhet/demo/subscribers.py
|
akhet/demo/subscribers.py
|
from akhet.urlgenerator import URLGenerator
import pyramid.threadlocal as threadlocal
from pyramid.exceptions import ConfigurationError
from .lib import helpers
def includeme(config):
"""Configure all application-specific subscribers."""
config.add_subscriber(create_url_generator, "pyramid.events.ContextFound")
config.add_subscriber(add_renderer_globals, "pyramid.events.BeforeRender")
def create_url_generator(event):
"""A subscriber for ``pyramid.events.ContextFound`` events. I create a
URL generator and attach it to the request (``request.url_generator``).
Templates and views can then use it to generate application URLs.
"""
request = event.request
context = request.context
url_generator = URLGenerator(context, request, qualified=False)
request.url_generator = url_generator
def add_renderer_globals(event):
"""A subscriber for ``pyramid.events.BeforeRender`` events. I add
some :term:`renderer globals` with values that are familiar to Pylons
users.
"""
renderer_globals = event
renderer_globals["h"] = helpers
request = event.get("request") or threadlocal.get_current_request()
if not request:
return
#renderer_globals["c"] = request.tmpl_context
#try:
# renderer_globals["session"] = request.session
#except ConfigurationError:
# pass
renderer_globals["url"] = request.url_generator
|
from akhet.urlgenerator import URLGenerator
import pyramid.threadlocal as threadlocal
from pyramid.exceptions import ConfigurationError
from .lib import helpers
def includeme(config):
"""Configure all application-specific subscribers."""
config.add_subscriber(create_url_generator, "pyramid.events.ContextFound")
config.add_subscriber(add_renderer_globals, "pyramid.events.BeforeRender")
def create_url_generator(event):
"""A subscriber for ``pyramid.events.ContextFound`` events. I create a
URL generator and attach it to the request (``request.url_generator``).
Templates and views can then use it to generate application URLs.
"""
request = event.request
context = request.context
url_generator = URLGenerator(context, request, qualified=False)
request.url_generator = url_generator
def add_renderer_globals(event):
"""A subscriber for ``pyramid.events.BeforeRender`` events. I add
some :term:`renderer globals` with values that are familiar to Pylons
users.
"""
renderer_globals = event
renderer_globals["h"] = helpers
request = event.get("request") or threadlocal.get_current_request()
if not request:
return
renderer_globals["r"] = request
#renderer_globals["c"] = request.tmpl_context
#try:
# renderer_globals["session"] = request.session
#except ConfigurationError:
# pass
renderer_globals["url"] = request.url_generator
|
Add renderer global 'r' as alias for 'request'.
|
Add renderer global 'r' as alias for 'request'.
|
Python
|
mit
|
Pylons/akhet,Pylons/akhet,hlwsmith/akhet,hlwsmith/akhet,hlwsmith/akhet
|
---
+++
@@ -30,6 +30,7 @@
request = event.get("request") or threadlocal.get_current_request()
if not request:
return
+ renderer_globals["r"] = request
#renderer_globals["c"] = request.tmpl_context
#try:
# renderer_globals["session"] = request.session
|
c5c92c852d27fb370e4efdc631caf38ebcfdd8ba
|
tests/GIR/test_query_select.py
|
tests/GIR/test_query_select.py
|
# coding=utf-8
import sys
import struct
import unittest
from test_000_config import TestConfig
from test_001_connection import TestConnection
from gi.repository import Midgard
from gi.repository import GObject
class TestQuerySelect(unittest.TestCase):
def testSelectAdminPerson(self):
mgd = TestConnection.openConnection()
st = Midgard.QueryStorage(dbclass = "midgard_person")
qs = Midgard.QuerySelect(connection = mgd, storage = st)
qs.execute()
objects = qs.list_objects()
# Expect one person only
self.assertEqual(len(objects), 1);
def testSelectInvalidType(self):
mgd = TestConnection.openConnection()
st = Midgard.QueryStorage(dbclass = "NotExists")
qs = Midgard.QuerySelect(connection = mgd, storage = st)
# Check if we have GError
self.assertRaises(GObject.GError, qs.execute)
# Check if we have correct domain
try:
qs.execute()
except GObject.GError as e:
self.assertEqual(e.domain, "midgard-validation-error-quark")
self.assertEqual(e.code, Midgard.ValidationError.TYPE_INVALID)
def testOrder(self):
mgd = TestConnection.openConnection()
self.assertEqual("ok", "NOT IMPLEMENTED")
def testInheritance(self):
mgd = TestConnection.openConnection()
qs = Midgard.QuerySelect(connection = mgd)
self.assertIsInstance(qs, Midgard.QueryExecutor)
if __name__ == "__main__":
unittest.main()
|
# coding=utf-8
import sys
import struct
import unittest
from test_000_config import TestConfig
from test_001_connection import TestConnection
from gi.repository import Midgard
from gi.repository import GObject
class TestQuerySelect(unittest.TestCase):
def setUp(self):
self.mgd = TestConnection.openConnection()
def tearDown(self):
return
def testSelectAdminPerson(self):
st = Midgard.QueryStorage(dbclass = "midgard_person")
qs = Midgard.QuerySelect(connection = self.mgd, storage = st)
qs.execute()
objects = qs.list_objects()
# Expect one person only
self.assertEqual(len(objects), 1);
def testSelectInvalidType(self):
st = Midgard.QueryStorage(dbclass = "NotExists")
qs = Midgard.QuerySelect(connection = self.mgd, storage = st)
# Check if we have GError
self.assertRaises(GObject.GError, qs.execute)
# Check if we have correct domain
try:
qs.execute()
except GObject.GError as e:
self.assertEqual(e.domain, "midgard-validation-error-quark")
self.assertEqual(e.code, Midgard.ValidationError.TYPE_INVALID)
def testOrder(self):
self.assertEqual("ok", "NOT IMPLEMENTED")
def testInheritance(self):
qs = Midgard.QuerySelect(connection = self.mgd)
self.assertIsInstance(qs, Midgard.QueryExecutor)
if __name__ == "__main__":
unittest.main()
|
Set MidgardConnection in setUp method
|
Set MidgardConnection in setUp method
|
Python
|
lgpl-2.1
|
piotras/midgard-core,piotras/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core,piotras/midgard-core,piotras/midgard-core
|
---
+++
@@ -10,19 +10,23 @@
from gi.repository import GObject
class TestQuerySelect(unittest.TestCase):
+ def setUp(self):
+ self.mgd = TestConnection.openConnection()
+
+ def tearDown(self):
+ return
+
def testSelectAdminPerson(self):
- mgd = TestConnection.openConnection()
st = Midgard.QueryStorage(dbclass = "midgard_person")
- qs = Midgard.QuerySelect(connection = mgd, storage = st)
+ qs = Midgard.QuerySelect(connection = self.mgd, storage = st)
qs.execute()
objects = qs.list_objects()
# Expect one person only
self.assertEqual(len(objects), 1);
- def testSelectInvalidType(self):
- mgd = TestConnection.openConnection()
+ def testSelectInvalidType(self):
st = Midgard.QueryStorage(dbclass = "NotExists")
- qs = Midgard.QuerySelect(connection = mgd, storage = st)
+ qs = Midgard.QuerySelect(connection = self.mgd, storage = st)
# Check if we have GError
self.assertRaises(GObject.GError, qs.execute)
# Check if we have correct domain
@@ -33,12 +37,10 @@
self.assertEqual(e.code, Midgard.ValidationError.TYPE_INVALID)
def testOrder(self):
- mgd = TestConnection.openConnection()
self.assertEqual("ok", "NOT IMPLEMENTED")
def testInheritance(self):
- mgd = TestConnection.openConnection()
- qs = Midgard.QuerySelect(connection = mgd)
+ qs = Midgard.QuerySelect(connection = self.mgd)
self.assertIsInstance(qs, Midgard.QueryExecutor)
if __name__ == "__main__":
|
264b7f26b872f4307c70cf6c68d84fdce620f5bb
|
utils/CIndex/completion_logger_server.py
|
utils/CIndex/completion_logger_server.py
|
#!/usr/bin/env python
import sys
from socket import *
from time import localtime, strftime
def main():
if len(sys.argv) < 4:
print "completion_logger_server.py <listen address> <listen port> <log file>"
exit(1)
host = sys.argv[1]
port = int(sys.argv[2])
buf = 1024 * 8
addr = (host,port)
# Create socket and bind to address
UDPSock = socket(AF_INET,SOCK_DGRAM)
UDPSock.bind(addr)
print "Listing on {0}:{1} and logging to '{2}'".format(host, port, sys.argv[3])
# Open the logging file.
f = open(sys.argv[3], "a")
# Receive messages
while 1:
data,addr = UDPSock.recvfrom(buf)
if not data:
break
else:
f.write(strftime("'%a, %d %b %Y %H:%M:%S' ", localtime()))
f.write(data)
f.write('\n')
# Close socket
UDPSock.close()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import sys
from socket import *
from time import localtime, strftime
def main():
if len(sys.argv) < 4:
print "completion_logger_server.py <listen address> <listen port> <log file>"
exit(1)
host = sys.argv[1]
port = int(sys.argv[2])
buf = 1024 * 8
addr = (host,port)
# Create socket and bind to address
UDPSock = socket(AF_INET,SOCK_DGRAM)
UDPSock.bind(addr)
print "Listing on {0}:{1} and logging to '{2}'".format(host, port, sys.argv[3])
# Open the logging file.
f = open(sys.argv[3], "a")
# Receive messages
while 1:
data,addr = UDPSock.recvfrom(buf)
if not data:
break
else:
f.write(strftime("'%a, %d %b %Y %H:%M:%S' ", localtime()))
f.write("'{0}' ".format(addr[0]))
f.write(data)
f.write('\n')
f.flush()
# Close socket
UDPSock.close()
if __name__ == '__main__':
main()
|
Include sender address in completion log.
|
Include sender address in completion log.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@101358 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang
|
---
+++
@@ -29,8 +29,10 @@
break
else:
f.write(strftime("'%a, %d %b %Y %H:%M:%S' ", localtime()))
+ f.write("'{0}' ".format(addr[0]))
f.write(data)
f.write('\n')
+ f.flush()
# Close socket
UDPSock.close()
|
d623b9904e4fa1967d8f83b45d39c9b57d2a4b0e
|
DDSP/Header.py
|
DDSP/Header.py
|
# The Header class contains the data structure of the Header class, and methods includes encapsulate, and decapsulate.
import struct
import random
from MessageType import MessageType
class Header:
"""docstring for Header"""
def __init__(self, type = MessageType.undefined):
self.type = type
self.length = 0
self.port = 0
def encapsulate(self):
return struct.pack("!BHH", self.type, self.length, self.port)
def decapsulate(self, data):
header = struct.unpack("!BHH", data)
self.type = header[0]
self.length = header[1]
self.port = header[2]
"""Write the test code here"""
if __name__ == '__main__':
header = Header(MessageType.query)
print str(header.port)
header.length = 1
header.decapsulate(header.encapsulate())
print header.type
print header.length
print header.port
print "Header class should work if you see this"
|
# The Header class contains the data structure of the Header class, and methods includes encapsulate, and decapsulate.
import struct
import random
from MessageType import MessageType
class Header:
"""docstring for Header"""
def __init__(self, type = MessageType.undefined):
self.version = 1
self.type = type
self.length = 0
self.port = 0
def encapsulate(self):
return struct.pack("!BBHH", self.version, self.type, self.length, self.port)
def decapsulate(self, data):
header = struct.unpack("!BBHH", data)
self.version = header[0]
self.type = header[1]
self.length = header[2]
self.port = header[3]
"""Write the test code here"""
if __name__ == '__main__':
header = Header(MessageType.query)
print str(header.port)
header.length = 1
header.decapsulate(header.encapsulate())
print header.version
print header.type
print header.length
print header.port
print "Header class should work if you see this"
|
Add a version field into the header.
|
Add a version field into the header.
|
Python
|
mit
|
CharKwayTeow/ddsp
|
---
+++
@@ -7,18 +7,20 @@
class Header:
"""docstring for Header"""
def __init__(self, type = MessageType.undefined):
+ self.version = 1
self.type = type
self.length = 0
self.port = 0
def encapsulate(self):
- return struct.pack("!BHH", self.type, self.length, self.port)
+ return struct.pack("!BBHH", self.version, self.type, self.length, self.port)
def decapsulate(self, data):
- header = struct.unpack("!BHH", data)
- self.type = header[0]
- self.length = header[1]
- self.port = header[2]
+ header = struct.unpack("!BBHH", data)
+ self.version = header[0]
+ self.type = header[1]
+ self.length = header[2]
+ self.port = header[3]
"""Write the test code here"""
if __name__ == '__main__':
@@ -26,6 +28,7 @@
print str(header.port)
header.length = 1
header.decapsulate(header.encapsulate())
+ print header.version
print header.type
print header.length
print header.port
|
9a850232e187080222e7d245c65264e9b3484ee8
|
tests/test_git_mongo.py
|
tests/test_git_mongo.py
|
from unittest import TestCase
from datetime import datetime
from citools.mongo import get_database_connection
from citools.git import get_last_revision
class TestLastStoreRetrieval(TestCase):
def setUp(self):
TestCase.setUp(self)
self.db = get_database_connection(database="test_citools")
self.collection = self.db['repository_information']
def test_none_where_no_data_yet(self):
self.assertEquals(None, get_last_revision(self.collection))
def test_last_repository_retrieved(self):
hash = '5ae35ebcbb0adc3660f0af891058e4e46dbdc14c'
self.collection.insert({
"hash_abbrev" : hash[0:16],
"hash" : hash,
"author_name" : "author_name",
"author_email" : "author_email",
"author_date" : datetime.now(),
"commiter_name" : "commiter_name",
"commiter_email" : "commiter_email",
"commiter_date" : datetime.now(),
"subject" : "subject",
})
self.assertEquals(hash, get_last_revision(self.collection))
def tearDown(self):
self.db.drop_collection(self.collection)
|
from unittest import TestCase
from datetime import datetime
from citools.mongo import get_database_connection
from citools.git import get_last_revision
from helpers import MongoTestCase
class TestLastStoreRetrieval(MongoTestCase):
def setUp(self):
super(TestLastStoreRetrieval, self).setUp()
self.collection = self.database['repository_information']
def test_none_where_no_data_yet(self):
self.assertEquals(None, get_last_revision(self.collection))
def test_last_repository_retrieved(self):
hash = '5ae35ebcbb0adc3660f0af891058e4e46dbdc14c'
self.collection.insert({
"hash_abbrev" : hash[0:16],
"hash" : hash,
"author_name" : "author_name",
"author_email" : "author_email",
"author_date" : datetime.now(),
"commiter_name" : "commiter_name",
"commiter_email" : "commiter_email",
"commiter_date" : datetime.now(),
"subject" : "subject",
})
self.assertEquals(hash, get_last_revision(self.collection))
|
Use MongoTestCase when we have it...
|
Use MongoTestCase when we have it...
|
Python
|
bsd-3-clause
|
ella/citools,ella/citools
|
---
+++
@@ -5,12 +5,13 @@
from citools.mongo import get_database_connection
from citools.git import get_last_revision
-class TestLastStoreRetrieval(TestCase):
+from helpers import MongoTestCase
+
+class TestLastStoreRetrieval(MongoTestCase):
def setUp(self):
- TestCase.setUp(self)
- self.db = get_database_connection(database="test_citools")
- self.collection = self.db['repository_information']
+ super(TestLastStoreRetrieval, self).setUp()
+ self.collection = self.database['repository_information']
def test_none_where_no_data_yet(self):
self.assertEquals(None, get_last_revision(self.collection))
@@ -31,5 +32,3 @@
})
self.assertEquals(hash, get_last_revision(self.collection))
- def tearDown(self):
- self.db.drop_collection(self.collection)
|
70138da1fa6a28d0e7b7fdf80ab894236c0f5583
|
tests/test_tokenizer.py
|
tests/test_tokenizer.py
|
import unittest
from cobe.tokenizer import MegaHALTokenizer
class testMegaHALTokenizer(unittest.TestCase):
def setUp(self):
self.tokenizer = MegaHALTokenizer()
def testSplitEmpty(self):
self.assertEquals(len(self.tokenizer.split("")), 0)
def testSplitSentence(self):
words = self.tokenizer.split("hi.")
self.assertEquals(len(words), 2)
self.assertEquals(words[0], "HI")
self.assertEquals(words[1], ".")
def testSplitComma(self):
words = self.tokenizer.split("hi, cobe")
self.assertEquals(len(words), 4)
self.assertEquals(words[0], "HI")
self.assertEquals(words[1], ", ")
self.assertEquals(words[2], "COBE")
self.assertEquals(words[3], ".")
def testSplitImplicitStop(self):
words = self.tokenizer.split("hi")
self.assertEquals(len(words), 2)
self.assertEquals(words[0], "HI")
self.assertEquals(words[1], ".")
def testSplitUrl(self):
words = self.tokenizer.split("http://www.google.com/")
self.assertEquals(len(words), 8)
self.assertEquals(words[0], "HTTP")
self.assertEquals(words[1], "://")
self.assertEquals(words[2], "WWW")
self.assertEquals(words[3], ".")
self.assertEquals(words[4], "GOOGLE")
self.assertEquals(words[5], ".")
self.assertEquals(words[6], "COM")
self.assertEquals(words[7], "/.")
if __name__ == '__main__':
unittest.main()
|
import unittest
from cobe.tokenizer import MegaHALTokenizer
class testMegaHALTokenizer(unittest.TestCase):
def setUp(self):
self.tokenizer = MegaHALTokenizer()
def testSplitEmpty(self):
self.assertEquals(len(self.tokenizer.split("")), 0)
def testSplitSentence(self):
words = self.tokenizer.split("hi.")
self.assertEquals(words, ["HI", "."])
def testSplitComma(self):
words = self.tokenizer.split("hi, cobe")
self.assertEquals(words, ["HI", ", ", "COBE", "."])
def testSplitImplicitStop(self):
words = self.tokenizer.split("hi")
self.assertEquals(words, ["HI", "."])
def testSplitUrl(self):
words = self.tokenizer.split("http://www.google.com/")
self.assertEquals(words, ["HTTP", "://", "WWW", ".", "GOOGLE", ".", "COM", "/."])
if __name__ == '__main__':
unittest.main()
|
Make all the tokenizer unit tests assertEquals() on arrays
|
Make all the tokenizer unit tests assertEquals() on arrays
|
Python
|
mit
|
LeMagnesium/cobe,tiagochiavericosta/cobe,pteichman/cobe,wodim/cobe-ng,meska/cobe,wodim/cobe-ng,meska/cobe,tiagochiavericosta/cobe,DarkMio/cobe,DarkMio/cobe,pteichman/cobe,LeMagnesium/cobe
|
---
+++
@@ -11,35 +11,19 @@
def testSplitSentence(self):
words = self.tokenizer.split("hi.")
- self.assertEquals(len(words), 2)
- self.assertEquals(words[0], "HI")
- self.assertEquals(words[1], ".")
+ self.assertEquals(words, ["HI", "."])
def testSplitComma(self):
words = self.tokenizer.split("hi, cobe")
- self.assertEquals(len(words), 4)
- self.assertEquals(words[0], "HI")
- self.assertEquals(words[1], ", ")
- self.assertEquals(words[2], "COBE")
- self.assertEquals(words[3], ".")
+ self.assertEquals(words, ["HI", ", ", "COBE", "."])
def testSplitImplicitStop(self):
words = self.tokenizer.split("hi")
- self.assertEquals(len(words), 2)
- self.assertEquals(words[0], "HI")
- self.assertEquals(words[1], ".")
+ self.assertEquals(words, ["HI", "."])
def testSplitUrl(self):
words = self.tokenizer.split("http://www.google.com/")
- self.assertEquals(len(words), 8)
- self.assertEquals(words[0], "HTTP")
- self.assertEquals(words[1], "://")
- self.assertEquals(words[2], "WWW")
- self.assertEquals(words[3], ".")
- self.assertEquals(words[4], "GOOGLE")
- self.assertEquals(words[5], ".")
- self.assertEquals(words[6], "COM")
- self.assertEquals(words[7], "/.")
+ self.assertEquals(words, ["HTTP", "://", "WWW", ".", "GOOGLE", ".", "COM", "/."])
if __name__ == '__main__':
unittest.main()
|
5ccf3753882c6cbde98923fe535857afca4a7187
|
webapp/calendars/forms.py
|
webapp/calendars/forms.py
|
from django import forms
from django.contrib.admin import widgets
from .models import Event
class LoginForm(forms.Form):
username = forms.CharField()
password = forms.CharField(widget=forms.PasswordInput())
class EventForm(forms.ModelForm):
class Meta:
model = Event
fields = (
'title', 'description',
'start_time', 'end_time',
'image', 'place', 'point'
)
widgets = {
'start_time': widgets.AdminSplitDateTime,
'end_time': widgets.AdminSplitDateTime
}
|
from django import forms
from django.contrib.admin import widgets
from .models import Event
class LoginForm(forms.Form):
username = forms.CharField(label='Nazwa użytkownika');
password = forms.CharField(label='Hasło', widget=forms.PasswordInput())
class EventForm(forms.ModelForm):
class Meta:
model = Event
fields = (
'title', 'description',
'start_time', 'end_time',
'image', 'place', 'point'
)
widgets = {
'start_time': widgets.AdminSplitDateTime,
'end_time': widgets.AdminSplitDateTime
}
|
Use Polish lables in login form.
|
Use Polish lables in login form.
Signed-off-by: Mariusz Fik <e22610367d206dca7aa58af34ebf008b556228c5@fidano.pl>
|
Python
|
agpl-3.0
|
Fisiu/calendar-oswiecim,Fisiu/calendar-oswiecim,hackerspace-silesia/calendar-oswiecim,firemark/calendar-oswiecim,firemark/calendar-oswiecim,hackerspace-silesia/calendar-oswiecim,firemark/calendar-oswiecim,hackerspace-silesia/calendar-oswiecim,Fisiu/calendar-oswiecim
|
---
+++
@@ -4,8 +4,8 @@
class LoginForm(forms.Form):
- username = forms.CharField()
- password = forms.CharField(widget=forms.PasswordInput())
+ username = forms.CharField(label='Nazwa użytkownika');
+ password = forms.CharField(label='Hasło', widget=forms.PasswordInput())
class EventForm(forms.ModelForm):
|
c15875062be2b59c78fca9a224b0231986a37868
|
feincms3/templatetags/feincms3_renderer.py
|
feincms3/templatetags/feincms3_renderer.py
|
from django import template
from django.utils.html import mark_safe
register = template.Library()
@register.simple_tag(takes_context=True)
def render_plugin(context, plugin):
"""
Render a single plugin. See :mod:`feincms3.renderer` for additional
details.
"""
return context['renderer'].render_plugin_in_context(plugin, context)
@register.simple_tag(takes_context=True)
def render_plugins(context, plugins):
"""
Render and concatenate a list of plugins. See
:mod:`feincms3.renderer` for additional details.
"""
renderer = context['renderer']
return mark_safe(''.join(
renderer.render_plugin_in_context(plugin, context)
for plugin in plugins
))
@register.simple_tag(takes_context=True)
def render_region(context, regions, region, **kwargs):
"""
Render a single region. See :class:`~feincms3.renderer.Regions` for
additional details. Any and all keyword arguments are forwarded to the
``render`` method of the ``Regions`` instance.
"""
return regions.render(region, context, **kwargs)
|
from django import template
from django.utils.html import mark_safe
register = template.Library()
@register.simple_tag(takes_context=True)
def render_plugin(context, plugin):
"""
Render a single plugin. See :mod:`feincms3.renderer` for additional
details.
In general you should prefer
:func:`~feincms3.templatetags.feincms3_renderer.render_region` over this
tag.
"""
return context['renderer'].render_plugin_in_context(plugin, context)
@register.simple_tag(takes_context=True)
def render_plugins(context, plugins):
"""
Render and concatenate a list of plugins. See
:mod:`feincms3.renderer` for additional details.
In general you should prefer
:func:`~feincms3.templatetags.feincms3_renderer.render_region` over this
tag.
"""
renderer = context['renderer']
return mark_safe(''.join(
renderer.render_plugin_in_context(plugin, context)
for plugin in plugins
))
@register.simple_tag(takes_context=True)
def render_region(context, regions, region, **kwargs):
"""
Render a single region. See :class:`~feincms3.renderer.Regions` for
additional details. Any and all keyword arguments are forwarded to the
``render`` method of the ``Regions`` instance.
"""
return regions.render(region, context, **kwargs)
|
Add note to render_plugin[s] that render_region should be preferred
|
Add note to render_plugin[s] that render_region should be preferred
|
Python
|
bsd-3-clause
|
matthiask/feincms3,matthiask/feincms3,matthiask/feincms3
|
---
+++
@@ -9,6 +9,10 @@
"""
Render a single plugin. See :mod:`feincms3.renderer` for additional
details.
+
+ In general you should prefer
+ :func:`~feincms3.templatetags.feincms3_renderer.render_region` over this
+ tag.
"""
return context['renderer'].render_plugin_in_context(plugin, context)
@@ -18,6 +22,10 @@
"""
Render and concatenate a list of plugins. See
:mod:`feincms3.renderer` for additional details.
+
+ In general you should prefer
+ :func:`~feincms3.templatetags.feincms3_renderer.render_region` over this
+ tag.
"""
renderer = context['renderer']
return mark_safe(''.join(
|
f11f482688d6374a7771c40ce48f4f743cc98b9b
|
storage_service/common/apps.py
|
storage_service/common/apps.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.apps import AppConfig
class CommonAppConfig(AppConfig):
name = "common"
def ready(self):
import common.signals # noqa: F401
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.apps import AppConfig
from prometheus_client import Info
from storage_service import __version__
version_info = Info("version", "Archivematica Storage Service version info")
class CommonAppConfig(AppConfig):
name = "common"
def ready(self):
import common.signals # noqa: F401
version_info.info({"version": __version__})
|
Include application version info metric
|
Include application version info metric
|
Python
|
agpl-3.0
|
artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service
|
---
+++
@@ -2,6 +2,12 @@
from __future__ import absolute_import
from django.apps import AppConfig
+from prometheus_client import Info
+
+from storage_service import __version__
+
+
+version_info = Info("version", "Archivematica Storage Service version info")
class CommonAppConfig(AppConfig):
@@ -9,3 +15,4 @@
def ready(self):
import common.signals # noqa: F401
+ version_info.info({"version": __version__})
|
fe5a980dd36c24008efe8e4900a675b568e5fb9d
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='ohmysportsfeedspy',
packages=['ohmysportsfeedspy'],
version='0.1.2',
author = ['Brad Barkhouse', 'MySportsFeeds'],
author_email='brad.barkhouse@mysportsfeeds.com',
url='https://github.com/MySportsFeeds/mysportsfeeds-python',
license='MIT',
description='A Python wrapper for the MySportsFeeds sports data API'
)
|
from distutils.core import setup
setup(
name='ohmysportsfeedspy',
packages=['ohmysportsfeedspy'],
version='0.1.3',
author = ['Brad Barkhouse', 'MySportsFeeds'],
author_email='brad.barkhouse@mysportsfeeds.com',
url='https://github.com/MySportsFeeds/mysportsfeeds-python',
license='MIT',
description='A Python wrapper for the MySportsFeeds sports data API'
)
|
Increment patch version to match submission to PyPI for pip install.
|
Increment patch version to match submission to PyPI for pip install.
|
Python
|
mit
|
MySportsFeeds/mysportsfeeds-python
|
---
+++
@@ -3,7 +3,7 @@
setup(
name='ohmysportsfeedspy',
packages=['ohmysportsfeedspy'],
- version='0.1.2',
+ version='0.1.3',
author = ['Brad Barkhouse', 'MySportsFeeds'],
author_email='brad.barkhouse@mysportsfeeds.com',
url='https://github.com/MySportsFeeds/mysportsfeeds-python',
|
91550be5f866cd53bdff019f1be02af00a20a3e0
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='lightstep',
version='4.1.0',
description='LightStep Python OpenTracing Implementation',
long_description='',
author='LightStep',
license='',
install_requires=['thrift>=0.10.0,<0.12.0',
'jsonpickle',
'six',
'basictracer>=3.0,<3.1',
'googleapis-common-protos>=1.5.3,<2.0',
'requests>=2.19,<3.0',
'protobuf>=3.6.0,<4.0'],
tests_require=['pytest',
'sphinx',
'sphinx-epytext'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ],
packages=find_packages(exclude=['docs*', 'tests*', 'sample*']),
)
|
from setuptools import setup, find_packages
setup(
name='lightstep',
version='4.1.0',
description='LightStep Python OpenTracing Implementation',
long_description='',
author='LightStep',
license='',
install_requires=['thrift>=0.10.0,<0.12.0',
'jsonpickle',
'six',
'basictracer>=3.0,<4',
'googleapis-common-protos>=1.5.3,<2.0',
'requests>=2.19,<3.0',
'protobuf>=3.6.0,<4.0'],
tests_require=['pytest',
'sphinx',
'sphinx-epytext'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ],
packages=find_packages(exclude=['docs*', 'tests*', 'sample*']),
)
|
Enable latest version of basictracer
|
Enable latest version of basictracer
|
Python
|
mit
|
lightstephq/lightstep-tracer-python
|
---
+++
@@ -10,7 +10,7 @@
install_requires=['thrift>=0.10.0,<0.12.0',
'jsonpickle',
'six',
- 'basictracer>=3.0,<3.1',
+ 'basictracer>=3.0,<4',
'googleapis-common-protos>=1.5.3,<2.0',
'requests>=2.19,<3.0',
'protobuf>=3.6.0,<4.0'],
|
327b4558d77256dd45f0e32be014960eb66734ff
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
from dichalcogenides import __version__
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='dichalcogenides',
version=__version__,
author='Evan Sosenko',
author_email='razorx@evansosenko.com',
packages=find_packages(exclude=['docs']),
url='https://github.com/razor-x/dichalcogenides',
license='MIT',
description='Python analysis code for dichalcogenide systems.',
long_description=long_description,
install_requires=[
'numpy>=1.11.0,<2.0.0',
'scipy>=0.17.0,<1.0.0',
'pyyaml>=3.11,<4.0'
]
)
|
import os
import re
from setuptools import find_packages, setup
from dichalcogenides import __version__
with open('README.rst', 'r') as f:
long_description = f.read()
if os.environ.get('READTHEDOCS') == 'True':
mocked = ['numpy', 'scipy']
mock_filter = lambda x: re.sub(r'>.+', '', x) not in mocked
else:
mock_filter = lambda p: True
setup(
name='dichalcogenides',
version=__version__,
author='Evan Sosenko',
author_email='razorx@evansosenko.com',
packages=find_packages(exclude=['docs']),
url='https://github.com/razor-x/dichalcogenides',
license='MIT',
description='Python analysis code for dichalcogenide systems.',
long_description=long_description,
install_requires=list(filter(mock_filter, [
'numpy>=1.11.0,<2.0.0',
'scipy>=0.17.0,<1.0.0',
'pyyaml>=3.11,<4.0'
]))
)
|
Remove mocked dependencies for readthedocs
|
Remove mocked dependencies for readthedocs
|
Python
|
mit
|
razor-x/dichalcogenides
|
---
+++
@@ -1,9 +1,17 @@
+import os
+import re
from setuptools import find_packages, setup
from dichalcogenides import __version__
with open('README.rst', 'r') as f:
long_description = f.read()
+
+if os.environ.get('READTHEDOCS') == 'True':
+ mocked = ['numpy', 'scipy']
+ mock_filter = lambda x: re.sub(r'>.+', '', x) not in mocked
+else:
+ mock_filter = lambda p: True
setup(
name='dichalcogenides',
@@ -15,9 +23,9 @@
license='MIT',
description='Python analysis code for dichalcogenide systems.',
long_description=long_description,
- install_requires=[
+ install_requires=list(filter(mock_filter, [
'numpy>=1.11.0,<2.0.0',
'scipy>=0.17.0,<1.0.0',
'pyyaml>=3.11,<4.0'
- ]
+ ]))
)
|
94ed3cdb5234a79bc754c54721f41eaeec51b846
|
setup.py
|
setup.py
|
from distutils.core import setup
from setuptools import find_packages
setup(name="django-image-cropping",
version="0.6.4",
description="A reusable app for cropping images easily and non-destructively in Django",
long_description=open('README.rst').read(),
author="jonasvp",
author_email="jvp@jonasundderwolf.de",
url="http://github.com/jonasundderwolf/django-image-cropping",
packages=find_packages(),
include_package_data=True,
install_requires=[
'easy_thumbnails==1.2',
],
test_suite='example.runtests.runtests',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from distutils.core import setup
from setuptools import find_packages
setup(name="django-image-cropping",
version="0.6.4",
description="A reusable app for cropping images easily and non-destructively in Django",
long_description=open('README.rst').read(),
author="jonasvp",
author_email="jvp@jonasundderwolf.de",
url="http://github.com/jonasundderwolf/django-image-cropping",
packages=find_packages(),
include_package_data=True,
install_requires=[
'easy_thumbnails==1.2',
],
test_suite='example.runtests.runtests',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Set Development Status to "Stable"
|
Set Development Status to "Stable"
|
Python
|
bsd-3-clause
|
winzard/django-image-cropping,henriquechehad/django-image-cropping,winzard/django-image-cropping,henriquechehad/django-image-cropping,henriquechehad/django-image-cropping,winzard/django-image-cropping
|
---
+++
@@ -15,7 +15,7 @@
],
test_suite='example.runtests.runtests',
classifiers=[
- 'Development Status :: 4 - Beta',
+ 'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
|
682c02b01775a443ce17aa5ea9805e5be6fd120b
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
def read(filename):
full_path = os.path.join(os.path.dirname(__file__), filename)
with open(full_path) as fd:
return fd.read()
setup(
name='nymms',
version='0.4.2',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/cloudtools/nymms",
description='Not Your Mother\'s Monitoring System (NYMMS)',
long_description=read('README.rst'),
classifiers=[
"Topic :: System :: Monitoring",
"License :: OSI Approved :: BSD License",
"Development Status :: 3 - Alpha"],
packages=find_packages(),
)
|
import os
from setuptools import setup, find_packages
import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
setup(
name='nymms',
version='0.2.1',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/cloudtools/nymms",
description='Not Your Mother\'s Monitoring System (NYMMS)',
long_description=read('README.rst'),
classifiers=[
"Topic :: System :: Monitoring",
"License :: OSI Approved :: BSD License",
"Development Status :: 3 - Alpha"],
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, 'scripts', '*')),
)
|
Fix versioning and add scripts directory
|
Fix versioning and add scripts directory
|
Python
|
bsd-2-clause
|
cloudtools/nymms
|
---
+++
@@ -1,15 +1,18 @@
import os
from setuptools import setup, find_packages
+import glob
+
+src_dir = os.path.dirname(__file__)
def read(filename):
- full_path = os.path.join(os.path.dirname(__file__), filename)
+ full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
setup(
name='nymms',
- version='0.4.2',
+ version='0.2.1',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
@@ -21,4 +24,5 @@
"License :: OSI Approved :: BSD License",
"Development Status :: 3 - Alpha"],
packages=find_packages(),
+ scripts=glob.glob(os.path.join(src_dir, 'scripts', '*')),
)
|
b1fc016fa56ffc801659f6ba2d807694529cfa57
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
from setuptools import setup
setup(
name = 'TracMasterTickets',
version = '2.1.1',
packages = ['mastertickets'],
package_data = { 'mastertickets': ['templates/*.html', 'htdocs/*.js', 'htdocs/*.css' ] },
author = "Noah Kantrowitz",
author_email = "coderanger@yahoo.com",
description = "Provides support for ticket dependencies and master tickets.",
license = "BSD",
keywords = "trac plugin ticket dependencies master",
url = "http://trac-hacks.org/wiki/MasterTicketsPlugin",
classifiers = [
'Framework :: Trac',
],
install_requires = ['Trac', 'Genshi >= 0.5.dev-r698,==dev'],
entry_points = {
'trac.plugins': [
'mastertickets.web_ui = mastertickets.web_ui',
'mastertickets.api = mastertickets.api',
]
}
)
|
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
from setuptools import setup
setup(
name = 'TracMasterTickets',
version = '2.1.1',
packages = ['mastertickets'],
package_data = { 'mastertickets': ['templates/*.html', 'htdocs/*.js', 'htdocs/*.css' ] },
author = "Noah Kantrowitz",
author_email = "noah@coderanger.net",
description = "Provides support for ticket dependencies and master tickets.",
license = "BSD",
keywords = "trac plugin ticket dependencies master",
url = "http://trac-hacks.org/wiki/MasterTicketsPlugin",
classifiers = [
'Framework :: Trac',
],
install_requires = ['Trac', 'Genshi >= 0.5.dev-r698,==dev'],
entry_points = {
'trac.plugins': [
'mastertickets.web_ui = mastertickets.web_ui',
'mastertickets.api = mastertickets.api',
]
}
)
|
Change my email to avoid Yahoo, which decided to brake my scraper script recently.
|
Change my email to avoid Yahoo, which decided to brake my scraper script recently.
|
Python
|
bsd-3-clause
|
SpamExperts/trac-masterticketsplugin,SpamExperts/trac-masterticketsplugin,SpamExperts/trac-masterticketsplugin
|
---
+++
@@ -10,7 +10,7 @@
package_data = { 'mastertickets': ['templates/*.html', 'htdocs/*.js', 'htdocs/*.css' ] },
author = "Noah Kantrowitz",
- author_email = "coderanger@yahoo.com",
+ author_email = "noah@coderanger.net",
description = "Provides support for ticket dependencies and master tickets.",
license = "BSD",
keywords = "trac plugin ticket dependencies master",
|
0e97763d49f449a9f0399ef1fded9e6c5997d8b0
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name="sbq",
packages=["sbq"],
version="0.2.0",
description="Low-dependency package for automating bigquery queries.",
author="Colin Fuller",
author_email="colin@khanacademy.org",
url="github.com/cjfuller/sbq",
keywords=["bigquery"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
requires=['requests']
)
|
from setuptools import setup
setup(
name="sbq",
packages=["sbq"],
version="0.2.0",
description="Low-dependency package for automating bigquery queries.",
author="Colin Fuller",
author_email="colin@khanacademy.org",
url="https://github.com/cjfuller/sbq",
keywords=["bigquery"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
requires=['requests']
)
|
Add a protocol to the url
|
Add a protocol to the url
|
Python
|
mit
|
cjfuller/sbq
|
---
+++
@@ -6,7 +6,7 @@
description="Low-dependency package for automating bigquery queries.",
author="Colin Fuller",
author_email="colin@khanacademy.org",
- url="github.com/cjfuller/sbq",
+ url="https://github.com/cjfuller/sbq",
keywords=["bigquery"],
classifiers=[
"Programming Language :: Python",
|
6e3d80d13864510cf2def7a20660a40daa793e5e
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import journal
setup(
name = 'journal',
version = journal.__version__,
author = journal.__author__,
author_email = 'askedrelic@gmail.com',
description = 'Simple CLI tool to help with keeping a work/personal journal',
long_description = open('README.markdown').read(),
url = 'https://github.com/askedrelic/journal',
packages = find_packages(),
test_suite = 'tests',
entry_points = """
[console_scripts]
journal = journal.main:main"""
)
|
from setuptools import setup, find_packages
import journal
setup(
name = 'journal',
version = journal.__version__,
author = journal.__author__,
author_email = 'askedrelic@gmail.com',
description = 'Simple CLI tool to help with keeping a work/personal journal',
long_description = open('README.markdown').read(),
url = 'https://github.com/askedrelic/journal',
packages = find_packages(),
entry_points = """
[console_scripts]
journal = journal.main:main""",
install_requires = ['argparse'],
)
|
Add argparse as install requirement for 2.5/2.6 systems
|
Add argparse as install requirement for 2.5/2.6 systems
|
Python
|
mit
|
askedrelic/journal
|
---
+++
@@ -13,9 +13,10 @@
url = 'https://github.com/askedrelic/journal',
packages = find_packages(),
-test_suite = 'tests',
entry_points = """
[console_scripts]
-journal = journal.main:main"""
+journal = journal.main:main""",
+
+install_requires = ['argparse'],
)
|
721089bfa4fb6316344b41355a1a0bf9611e96a4
|
setup.py
|
setup.py
|
# Haze
#
# Author: Joe Block <jpb@unixorn.net>
# License: Apache 2.0
from setuptools import setup, find_packages
name = "haze"
requirements = map(str.strip, open("requirements.txt").readlines())
setup(
name = name,
description = "Haze AWS utility functions",
packages = find_packages(),
version = "0.0.4",
download_url = 'https://github.com/unixorn/haze/tarball/0.0.4',
keywords = ['aws', 'cloud'],
install_requires = requirements,
entry_points = {
"console_scripts": [
"aws-instance-id = %s.commands.myinstanceid:awsInstanceID" % name,
("aws-region = %s.commands.myregion:awsMyRegion" % name)
]
}
)
|
# Haze
#
# Author: Joe Block <jpb@unixorn.net>
# License: Apache 2.0
from setuptools import setup, find_packages
name = "haze"
requirements = map(str.strip, open("requirements.txt").readlines())
setup(
name = name,
author = "Joe Block",
author_email = "jpb@unixorn.net",
description = "Haze AWS utility functions",
url = "https://github.com/unixorn/haze",
packages = find_packages(),
version = "0.0.5",
download_url = 'https://github.com/unixorn/haze/tarball/0.0.5',
keywords = ['aws', 'cloud'],
install_requires = requirements,
entry_points = {
"console_scripts": [
"aws-instance-id = %s.commands.myinstanceid:awsInstanceID" % name,
("aws-region = %s.commands.myregion:awsMyRegion" % name)
]
}
)
|
Add author, author_email & url to make pypi happy
|
Add author, author_email & url to make pypi happy
|
Python
|
apache-2.0
|
unixorn/haze
|
---
+++
@@ -10,10 +10,13 @@
setup(
name = name,
+ author = "Joe Block",
+ author_email = "jpb@unixorn.net",
description = "Haze AWS utility functions",
+ url = "https://github.com/unixorn/haze",
packages = find_packages(),
- version = "0.0.4",
- download_url = 'https://github.com/unixorn/haze/tarball/0.0.4',
+ version = "0.0.5",
+ download_url = 'https://github.com/unixorn/haze/tarball/0.0.5',
keywords = ['aws', 'cloud'],
install_requires = requirements,
entry_points = {
|
254bea654189dea4ce6d20c981670b775e2f4318
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='lambda_utils',
version='0.2.3',
description="A collection of AWS Lambda Utils / Decorator for different AWS events e.g. Api Gateway, S3, CloudFormation, CloudWatch ",
long_description=readme + '\n\n' + history,
author="Cloudheads",
author_email='theguys@cloudheads.io',
url='https://github.com/CloudHeads/lambda_utils',
packages=find_packages(exclude=('tests', )),
package_dir={'lambda_utils': 'lambda_utils'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='lambda_utils',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
tests_require=test_requirements
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='lambda_utils',
version='0.2.3',
description="A collection of AWS Lambda Utils / Decorator for different AWS events e.g. Api Gateway, S3, CloudFormation, CloudWatch ",
long_description=readme + '\n\n' + history,
author="Cloudheads",
author_email='theguys@cloudheads.io',
url='https://github.com/CloudHeads/lambda_utils',
packages=find_packages(exclude=[
"tests",
"tests.*",
]),
package_dir={'lambda_utils': 'lambda_utils'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='lambda_utils',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
tests_require=test_requirements
)
|
Add tests. For exclude packages
|
Add tests. For exclude packages
|
Python
|
mit
|
CloudHeads/lambda_utils
|
---
+++
@@ -25,7 +25,10 @@
author="Cloudheads",
author_email='theguys@cloudheads.io',
url='https://github.com/CloudHeads/lambda_utils',
- packages=find_packages(exclude=('tests', )),
+ packages=find_packages(exclude=[
+ "tests",
+ "tests.*",
+ ]),
package_dir={'lambda_utils': 'lambda_utils'},
include_package_data=True,
install_requires=requirements,
|
5610f1eaea6f7e2c72c823c9cf2f29b423ce1209
|
setup.py
|
setup.py
|
import setuptools
REQUIREMENTS = [
"nose==1.3.0",
"python-dateutil==1.5",
]
if __name__ == "__main__":
setuptools.setup(
name="jsond",
version="0.0.1",
author="EDITD",
author_email="engineering@editd.com",
packages=setuptools.find_packages(),
scripts=[],
url="https://github.com/EDITD/jsond",
license="LICENSE.txt",
description="JSON (with dates)",
long_description="View the github page (https://github.com/EDITD/jsond) for more details.",
install_requires=REQUIREMENTS
)
|
import setuptools
REQUIREMENTS = [
"nose==1.3.0",
"python-dateutil==1.5",
]
if __name__ == "__main__":
setuptools.setup(
name="jsond",
version="1.0.0",
author="EDITD",
author_email="engineering@editd.com",
packages=setuptools.find_packages(),
scripts=[],
url="https://github.com/EDITD/jsond",
license="LICENSE.txt",
description="JSON (with dates)",
long_description="View the github page (https://github.com/EDITD/jsond) for more details.",
install_requires=REQUIREMENTS
)
|
Make the leap to v1
|
Make the leap to v1
|
Python
|
mit
|
EDITD/jsond
|
---
+++
@@ -10,7 +10,7 @@
if __name__ == "__main__":
setuptools.setup(
name="jsond",
- version="0.0.1",
+ version="1.0.0",
author="EDITD",
author_email="engineering@editd.com",
packages=setuptools.find_packages(),
|
15d0e080225586f06044a25dec485a5ebb65b799
|
setup.py
|
setup.py
|
#!/usr/bin/python2.6
"""Setup file for r53."""
__author__ = 'memory@blank.org'
from setuptools import setup
setup(
name='r53',
version='0.1',
description='Command line script to synchronize Amazon Route53 DNS data.',
package_dir={'': 'src'},
install_requires=[
'boto',
'lxml',
'argparse',
],
entry_points={
'console_scripts': [
'r53 = r53.r53:main',
],
},
zip_safe=False,
)
|
#!/usr/bin/python2.6
"""Setup file for r53."""
__author__ = 'memory@blank.org'
from setuptools import setup
setup(
name='r53',
version='0.1',
description='Command line script to synchronize Amazon Route53 DNS data.',
package_dir={'': 'src'},
packages=['r53'],
install_requires=[
'boto',
'lxml',
'argparse',
],
entry_points={
'console_scripts': [
'r53 = r53.r53:main',
],
},
zip_safe=False,
)
|
Add missing package declaration to distutils
|
Add missing package declaration to distutils
Through easy_install and pip, the r53 package is not included (just missing).
Declare it here so it will be included and prevent this:
[jed@js route53]$ r53
Traceback (most recent call last):
File /usr/local/bin/r53, line 9, in <module>
load_entry_point('r53==0.1', 'console_scripts', 'r53')()
File /Library/Python/2.7/site-packages/distribute-0.6.24-py2.7.egg/pkg_resources.py, line 337, in load_entry_point
return get_distribution(dist).load_entry_point(group, name)
File /Library/Python/2.7/site-packages/distribute-0.6.24-py2.7.egg/pkg_resources.py, line 2279, in load_entry_point
return ep.load()
File /Library/Python/2.7/site-packages/distribute-0.6.24-py2.7.egg/pkg_resources.py, line 1989, in load
entry = __import__(self.module_name, globals(),globals(), ['__name__'])
ImportError: No module named r53.r53
|
Python
|
mit
|
coops/r53
|
---
+++
@@ -10,6 +10,7 @@
version='0.1',
description='Command line script to synchronize Amazon Route53 DNS data.',
package_dir={'': 'src'},
+ packages=['r53'],
install_requires=[
'boto',
'lxml',
|
9df7efa09e5d27ef0b238b6f4091e99d63fadd82
|
setup.py
|
setup.py
|
#!/usr/bin/env python2.6
import setuptools
setuptools.setup(
name='clicast',
version='0.4.5',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description=open('README.rst').read(),
entry_points={
'console_scripts': [
'cast = clicast.editor:cast',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
package_dir={'': 'src'},
packages=setuptools.find_packages('src'),
include_package_data=True,
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Development Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='cli broadcast command warning critical bug',
)
|
#!/usr/bin/env python2.6
import setuptools
setuptools.setup(
name='clicast',
version='0.4.5',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description='Broadcast messages for CLI tools, such as a warning for critical bug or notification about new features.',
long_description=open('README.rst').read(),
url='https://github.com/maxzheng/clicast',
entry_points={
'console_scripts': [
'cast = clicast.editor:cast',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
package_dir={'': 'src'},
packages=setuptools.find_packages('src'),
include_package_data=True,
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='cli broadcast command warning critical bug',
)
|
Add long description / url
|
Add long description / url
|
Python
|
mit
|
maxzheng/clicast
|
---
+++
@@ -10,7 +10,10 @@
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
- description=open('README.rst').read(),
+ description='Broadcast messages for CLI tools, such as a warning for critical bug or notification about new features.',
+ long_description=open('README.rst').read(),
+
+ url='https://github.com/maxzheng/clicast',
entry_points={
'console_scripts': [
@@ -32,7 +35,7 @@
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
- 'Topic :: Software Development :: Development Tools',
+ 'Topic :: Software Development',
'License :: OSI Approved :: MIT License',
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.