commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
d852781fe93c7a37b90387e2b388bf18ed3823d1 | command/install_ext.py | command/install_ext.py | """install_ext
Implement the Distutils "install_ext" command to install extension modules."""
# created 1999/09/12, Greg Ward
__revision__ = "$Id$"
from distutils.core import Command
from distutils.util import copy_tree
class install_ext (Command):
description = "install C/C++ extension modules"
user_options = [
('install-dir=', 'd', "directory to install to"),
('build-dir=','b', "build directory (where to install from)"),
]
def initialize_options (self):
# let the 'install' command dictate our installation directory
self.install_dir = None
self.build_dir = None
def finalize_options (self):
self.set_undefined_options ('install',
('build_platlib', 'build_dir'),
('install_platlib', 'install_dir'))
def run (self):
# Make sure we have built all extension modules first
self.run_peer ('build_ext')
# Dump the entire "build/platlib" directory (or whatever it really
# is; "build/platlib" is the default) to the installation target
# (eg. "/usr/local/lib/python1.5/site-packages"). Note that
# putting files in the right package dir is already done when we
# build.
outfiles = self.copy_tree (self.build_dir, self.install_dir)
# class InstallExt
| """install_ext
Implement the Distutils "install_ext" command to install extension modules."""
# created 1999/09/12, Greg Ward
__revision__ = "$Id$"
from distutils.core import Command
from distutils.util import copy_tree
class install_ext (Command):
description = "install C/C++ extension modules"
user_options = [
('install-dir=', 'd', "directory to install to"),
('build-dir=','b', "build directory (where to install from)"),
]
def initialize_options (self):
# let the 'install' command dictate our installation directory
self.install_dir = None
self.build_dir = None
def finalize_options (self):
self.set_undefined_options ('install',
('build_lib', 'build_dir'),
('install_lib', 'install_dir'))
def run (self):
# Make sure we have built all extension modules first
self.run_peer ('build_ext')
# Dump the entire "build/platlib" directory (or whatever it really
# is; "build/platlib" is the default) to the installation target
# (eg. "/usr/local/lib/python1.5/site-packages"). Note that
# putting files in the right package dir is already done when we
# build.
outfiles = self.copy_tree (self.build_dir, self.install_dir)
# class InstallExt
| Fix how we set 'build_dir' and 'install_dir' options from 'install' options -- irrelevant because this file is about to go away, but oh well. | Fix how we set 'build_dir' and 'install_dir' options from 'install' options --
irrelevant because this file is about to go away, but oh well.
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools | ---
+++
@@ -25,8 +25,8 @@
def finalize_options (self):
self.set_undefined_options ('install',
- ('build_platlib', 'build_dir'),
- ('install_platlib', 'install_dir'))
+ ('build_lib', 'build_dir'),
+ ('install_lib', 'install_dir'))
def run (self):
|
aace7956091f10af19dfe9eaaf12aef8b0f9f579 | new_validity.py | new_validity.py | import pandas as pd
import numpy as np
import operator
from sys import argv
import os
def extract( file_name ):
with open(file_name) as f:
for i,line in enumerate(f,1):
if "SCN" in line:
return i
os.system('ltahdr -i'+ argv[1]+ '> lta_file.txt')
dictionary = {}
#lta_file = str(argv[1])
skipped_rows = extract('lta_file.txt')-1
header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
flux = list(set(header["OBJECT"]))
print flux
header['Nrecs'] = header['Nrecs'].astype(float)
for i in flux :
temp = header.loc[header.OBJECT==i,'Nrecs'].values
temp = np.mean(temp)
dictionary[i]=temp
print dictionary
source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0]
print source
| import pandas as pd
import numpy as np
import operator
from sys import argv
import os
def extract( file_name ):
with open(file_name) as f:
for i,line in enumerate(f,1):
if "SCN" in line:
return i
def main(lta_name):
os.system('ltahdr -i'+ lta_name + '> lta_file.txt')
dictionary = {}
#lta_file = str(argv[1])
skipped_rows = extract('lta_file.txt')-1
header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
flux = list(set(header["OBJECT"]))
#print flux
header['Nrecs'] = header['Nrecs'].astype(float)
for i in flux :
temp = header.loc[header.OBJECT==i,'Nrecs'].values
temp = np.mean(temp)
dictionary[i]=temp
#print dictionary
source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0]
return source
| Insert main body of code into function | Insert main body of code into function
| Python | mit | NCRA-TIFR/gadpu,NCRA-TIFR/gadpu | ---
+++
@@ -5,29 +5,27 @@
import os
def extract( file_name ):
- with open(file_name) as f:
- for i,line in enumerate(f,1):
- if "SCN" in line:
- return i
+ with open(file_name) as f:
+ for i,line in enumerate(f,1):
+ if "SCN" in line:
+ return i
+def main(lta_name):
+ os.system('ltahdr -i'+ lta_name + '> lta_file.txt')
+ dictionary = {}
+ #lta_file = str(argv[1])
+ skipped_rows = extract('lta_file.txt')-1
-os.system('ltahdr -i'+ argv[1]+ '> lta_file.txt')
-dictionary = {}
-#lta_file = str(argv[1])
-skipped_rows = extract('lta_file.txt')-1
+ header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
+ flux = list(set(header["OBJECT"]))
+ #print flux
-header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
-flux = list(set(header["OBJECT"]))
-print flux
+ header['Nrecs'] = header['Nrecs'].astype(float)
-header['Nrecs'] = header['Nrecs'].astype(float)
-
-for i in flux :
+ for i in flux :
temp = header.loc[header.OBJECT==i,'Nrecs'].values
temp = np.mean(temp)
dictionary[i]=temp
-print dictionary
+ #print dictionary
-source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0]
-print source
-
-
+ source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0]
+ return source |
7cd69c1a4b8cc221bbbb40d5b2a1a53a835b11e9 | nhs/raw/urls.py | nhs/raw/urls.py | """
URLs for the Raw data dumps.
"""
from django.conf.urls.defaults import patterns, url
from nhs.raw.views import Ratio, Drug
urlpatterns = patterns(
'',
url(r'/ratio/(?P<bucket1>[0-9A-Z]+)/(?P<bucket2>[0-9A-Z]+)/ratio.zip$',
Ratio.as_view(), name='rawcompare'),
url(r'/drug/percapitamap/ccg/(?P<bnf_code>[0-9A-Z]+)/percap.zip$',
Drug.as_view(), name='rawdrug'),
)
| """
URLs for the Raw data dumps.
"""
from django.conf.urls.defaults import patterns, url
from nhs.raw.views import Ratio, Drug
urlpatterns = patterns(
'',
url(r'/ratio/(?P<bucket1>[0-9A-Z]+)/(?P<bucket2>[0-9A-Z]+)/ratio.zip$',
Ratio.as_view(), name='rawcompare'),
url(r'/drug/(?P<bnf_code>[0-9A-Z]+)/percap.zip$',
Drug.as_view(), name='rawdrug'),
)
| Update URls for raw download. | Update URls for raw download.
| Python | agpl-3.0 | openhealthcare/open-prescribing,openhealthcare/open-prescribing,openhealthcare/open-prescribing | ---
+++
@@ -11,6 +11,6 @@
url(r'/ratio/(?P<bucket1>[0-9A-Z]+)/(?P<bucket2>[0-9A-Z]+)/ratio.zip$',
Ratio.as_view(), name='rawcompare'),
- url(r'/drug/percapitamap/ccg/(?P<bnf_code>[0-9A-Z]+)/percap.zip$',
+ url(r'/drug/(?P<bnf_code>[0-9A-Z]+)/percap.zip$',
Drug.as_view(), name='rawdrug'),
) |
3f1f666606a0b092c796fab27a67b2dde5a33cb9 | nipype/setup.py | nipype/setup.py | def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('nipype', parent_package, top_path)
# List all packages to be loaded here
config.add_subpackage('interfaces')
config.add_subpackage('pipeline')
config.add_subpackage('utils')
config.add_subpackage('externals')
# List all data directories to be loaded here
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('nipype', parent_package, top_path)
# List all packages to be loaded here
config.add_subpackage('algorithms')
config.add_subpackage('interfaces')
config.add_subpackage('pipeline')
config.add_subpackage('utils')
config.add_subpackage('externals')
config.add_subpackage('testing')
# List all data directories to be loaded here
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| Add missing packages for install: algorithms, testing. | Add missing packages for install: algorithms, testing.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@527 ead46cd0-7350-4e37-8683-fc4c6f79bf00
| Python | bsd-3-clause | christianbrodbeck/nipype,arokem/nipype,carlohamalainen/nipype,pearsonlab/nipype,sgiavasis/nipype,gerddie/nipype,blakedewey/nipype,FredLoney/nipype,fprados/nipype,grlee77/nipype,sgiavasis/nipype,JohnGriffiths/nipype,iglpdc/nipype,gerddie/nipype,iglpdc/nipype,dmordom/nipype,arokem/nipype,FCP-INDI/nipype,mick-d/nipype_source,mick-d/nipype_source,Leoniela/nipype,wanderine/nipype,FCP-INDI/nipype,blakedewey/nipype,iglpdc/nipype,JohnGriffiths/nipype,wanderine/nipype,carolFrohlich/nipype,sgiavasis/nipype,sgiavasis/nipype,rameshvs/nipype,blakedewey/nipype,grlee77/nipype,iglpdc/nipype,christianbrodbeck/nipype,mick-d/nipype,mick-d/nipype,glatard/nipype,arokem/nipype,pearsonlab/nipype,dgellis90/nipype,grlee77/nipype,dmordom/nipype,pearsonlab/nipype,mick-d/nipype,JohnGriffiths/nipype,Leoniela/nipype,dgellis90/nipype,FCP-INDI/nipype,glatard/nipype,grlee77/nipype,carolFrohlich/nipype,rameshvs/nipype,gerddie/nipype,Leoniela/nipype,carolFrohlich/nipype,FredLoney/nipype,gerddie/nipype,dmordom/nipype,pearsonlab/nipype,mick-d/nipype,carlohamalainen/nipype,wanderine/nipype,satra/NiPypeold,glatard/nipype,mick-d/nipype_source,dgellis90/nipype,satra/NiPypeold,blakedewey/nipype,glatard/nipype,fprados/nipype,carolFrohlich/nipype,wanderine/nipype,rameshvs/nipype,arokem/nipype,FCP-INDI/nipype,carlohamalainen/nipype,FredLoney/nipype,JohnGriffiths/nipype,dgellis90/nipype,fprados/nipype,rameshvs/nipype | ---
+++
@@ -3,10 +3,12 @@
config = Configuration('nipype', parent_package, top_path)
# List all packages to be loaded here
+ config.add_subpackage('algorithms')
config.add_subpackage('interfaces')
config.add_subpackage('pipeline')
config.add_subpackage('utils')
config.add_subpackage('externals')
+ config.add_subpackage('testing')
# List all data directories to be loaded here
return config |
4417b3c701e44fbf94fb7375a7a3f148f1ee6112 | tilequeue/queue/file.py | tilequeue/queue/file.py | from tilequeue.tile import serialize_coord, deserialize_coord, CoordMessage
import threading
class OutputFileQueue(object):
def __init__(self, fp):
self.fp = fp
self.lock = threading.RLock()
def enqueue(self, coord):
with self.lock:
payload = serialize_coord(coord)
self.fp.write(payload + '\n')
def enqueue_batch(self, coords):
n = 0
for coord in coords:
self.enqueue(coord)
n += 1
return n, 0
def read(self, max_to_read=1, timeout_seconds=20):
with self.lock:
coords = []
for _ in range(max_to_read):
try:
coord = next(self.fp)
except StopIteration:
break
coords.append(CoordMessage(deserialize_coord(coord), None))
return coords
def job_done(self, coord_message):
pass
def clear(self):
with self.lock:
self.fp.seek(0)
self.fp.truncate()
return -1
def close(self):
with self.lock:
remaining_queue = "".join([ln for ln in self.fp])
self.clear()
self.fp.write(remaining_queue)
self.fp.close()
| from tilequeue.tile import serialize_coord, deserialize_coord, CoordMessage
import threading
class OutputFileQueue(object):
def __init__(self, fp):
self.fp = fp
self._lock = threading.RLock()
def enqueue(self, coord):
with self._lock:
payload = serialize_coord(coord)
self.fp.write(payload + '\n')
def enqueue_batch(self, coords):
n = 0
for coord in coords:
self.enqueue(coord)
n += 1
return n, 0
def read(self, max_to_read=1, timeout_seconds=20):
with self._lock:
coords = []
for _ in range(max_to_read):
try:
coord = next(self.fp)
except StopIteration:
break
coords.append(CoordMessage(deserialize_coord(coord), None))
return coords
def job_done(self, coord_message):
pass
def clear(self):
with self._lock:
self.fp.seek(0)
self.fp.truncate()
return -1
def close(self):
with self._lock:
remaining_queue = "".join([ln for ln in self.fp])
self.clear()
self.fp.write(remaining_queue)
self.fp.close()
| Rename lock to _lock to imply that it's private. | Rename lock to _lock to imply that it's private.
tilequeue/queue/file.py
-The `lock` instance variable shouldn't be used outside of the
`OutputFileQueue`'s methods.
| Python | mit | tilezen/tilequeue,mapzen/tilequeue | ---
+++
@@ -6,10 +6,10 @@
def __init__(self, fp):
self.fp = fp
- self.lock = threading.RLock()
+ self._lock = threading.RLock()
def enqueue(self, coord):
- with self.lock:
+ with self._lock:
payload = serialize_coord(coord)
self.fp.write(payload + '\n')
@@ -21,7 +21,7 @@
return n, 0
def read(self, max_to_read=1, timeout_seconds=20):
- with self.lock:
+ with self._lock:
coords = []
for _ in range(max_to_read):
try:
@@ -36,13 +36,13 @@
pass
def clear(self):
- with self.lock:
+ with self._lock:
self.fp.seek(0)
self.fp.truncate()
return -1
def close(self):
- with self.lock:
+ with self._lock:
remaining_queue = "".join([ln for ln in self.fp])
self.clear()
self.fp.write(remaining_queue) |
4403808f07e39fbc420704b33cb74fea9af72b9f | modelview/urls.py | modelview/urls.py | from django.conf.urls import url
from modelview import views
from oeplatform import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^(?P<sheettype>[\w\d_]+)s/$', views.listsheets, {}, name='modellist'),
url(r'^overview/$', views.overview, {}),
url(r'^(?P<sheettype>[\w\d_]+)s/add/$', views.FSAdd.as_view(), {'method':'add'}, name='modellist'),
url(r'^(?P<sheettype>[\w\d_]+)s/download/$', views.model_to_csv, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/$', views.show, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/edit/$', views.editModel, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\w\d_]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'),
]
| from django.conf.urls import url
from modelview import views
from oeplatform import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^(?P<sheettype>[\w\d_]+)s/$', views.listsheets, {}, name='modellist'),
url(r'^overview/$', views.overview, {}),
url(r'^(?P<sheettype>[\w\d_]+)s/add/$', views.FSAdd.as_view(), {'method':'add'}, name='modellist'),
url(r'^(?P<sheettype>[\w\d_]+)s/download/$', views.model_to_csv, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/$', views.show, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/edit/$', views.editModel, {}, name='index'),
url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\d]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'),
]
| Simplify regex in url matching | Simplify regex in url matching
| Python | agpl-3.0 | openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform | ---
+++
@@ -9,8 +9,8 @@
url(r'^overview/$', views.overview, {}),
url(r'^(?P<sheettype>[\w\d_]+)s/add/$', views.FSAdd.as_view(), {'method':'add'}, name='modellist'),
url(r'^(?P<sheettype>[\w\d_]+)s/download/$', views.model_to_csv, {}, name='index'),
- url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/$', views.show, {}, name='index'),
- url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\w\d_]+)/edit/$', views.editModel, {}, name='index'),
- url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\w\d_]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'),
+ url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/$', views.show, {}, name='index'),
+ url(r'^(?P<sheettype>[\w\d_]+)s/(?P<model_name>[\d]+)/edit/$', views.editModel, {}, name='index'),
+ url(r'^(?P<sheettype>[\w\d_]+)s/(?P<pk>[\d]+)/update/$', views.FSAdd.as_view(), {'method':'update'}, name='index'),
] |
d54854c11094e0ca8598e59de0bc0795dc8143c9 | lib/recordclass/__init__.py | lib/recordclass/__init__.py | # The MIT License (MIT)
#
# Copyright (c) <2011-2014> <Shibzukhov Zaur, szport at gmail dot com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .memoryslots import memoryslots, itemgetset
from .record import recordclass
__version__ = '0.4.2'
| # The MIT License (MIT)
#
# Copyright (c) <2011-2014> <Shibzukhov Zaur, szport at gmail dot com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .memoryslots import memoryslots, itemgetset
from .record import recordclass, RecordClass
__version__ = '0.4.2'
| Add import of RecordClass to init | Add import of RecordClass to init
--HG--
branch : typing
| Python | mit | vovanbo/trafaretrecord,vovanbo/trafaretrecord | ---
+++
@@ -21,6 +21,6 @@
# THE SOFTWARE.
from .memoryslots import memoryslots, itemgetset
-from .record import recordclass
+from .record import recordclass, RecordClass
__version__ = '0.4.2' |
8e0cf99380b284ff4f7b962f622933c243828be7 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name="django-posgtres-geometry",
version="0.1.2",
packages=find_packages(),
install_requires=['django', 'psycopg2'],
description="Django ORM field for Postgres geometry types",
author="Daniele Esposti",
author_email="expo@expobrain.net",
maintainer="Daniele Esposti",
maintainer_email="expo@expobrain.net",
url="http://github.com/expobrain/django-postgres-geometry",
)
| from setuptools import setup, find_packages
setup(
name="django-postgres-geometry",
version="0.1.2",
packages=find_packages(),
install_requires=['django', 'psycopg2'],
description="Django ORM field for Postgres geometry types",
author="Daniele Esposti",
author_email="expo@expobrain.net",
maintainer="Daniele Esposti",
maintainer_email="expo@expobrain.net",
url="http://github.com/expobrain/django-postgres-geometry",
)
| Fix typo in package name | Fix typo in package name
| Python | mit | team23/django-postgres-geometry | ---
+++
@@ -2,7 +2,7 @@
setup(
- name="django-posgtres-geometry",
+ name="django-postgres-geometry",
version="0.1.2",
packages=find_packages(),
install_requires=['django', 'psycopg2'], |
f0a1555d9767db413d39b08865e2e3aa8f633d2f | setup.py | setup.py | #! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg', 'examples/*.dat']}
)
| #! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'components/Parameters/*']}
)
| Add data files for KuModel to package data | Add data files for KuModel to package data
| Python | mit | permamodel/permamodel,permamodel/permamodel | ---
+++
@@ -13,5 +13,7 @@
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
- package_data={'': ['examples/*.cfg', 'examples/*.dat']}
+ package_data={'': ['examples/*.cfg',
+ 'examples/*.dat',
+ 'components/Parameters/*']}
) |
ab6393c6b032689417c22d40fdce88807190b672 | setup.py | setup.py | from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
with open('requirements.txt') as f:
requirements = f.readlines()
setup(
name='pycc',
version='1.0.0',
url='https://github.com/kevinconway/pycc',
license=license,
description='Python code optimizer.',
author='Kevin Conway',
author_email='kevinjacobconway@gmail.com',
long_description=readme,
classifiers=[],
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
requires=requirements,
entry_points={
'console_scripts': [
'pycc-transform = pycc.cli.transform:main',
'pycc-compile = pycc.cli.compile:main',
],
'pycc.optimizers': [
'pycc_constant_inliner = pycc.cli.extensions.constants:ConstantInlineExtension',
],
},
)
| from setuptools import setup, find_packages
with open('README.rst') as f:
readme = f.read()
with open('LICENSE') as f:
license = f.read()
with open('requirements.txt') as f:
requirements = f.readlines()
setup(
name='pycc',
version='2.0.0',
url='https://github.com/kevinconway/pycc',
license=license,
description='Python code optimizer.',
author='Kevin Conway',
author_email='kevinjacobconway@gmail.com',
long_description=readme,
classifiers=[],
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
requires=requirements,
entry_points={
'console_scripts': [
'pycc-transform = pycc.cli.transform:main',
'pycc-compile = pycc.cli.compile:main',
],
'pycc.optimizers': [
'pycc_constant_inliner = pycc.cli.extensions.constants:ConstantInlineExtension',
],
},
)
| Change version number to reflect recent changes | Change version number to reflect recent changes
Bumping a major version to reflect the interface and extension
changes which have occurred.
Signed-off-by: Kevin Conway <3473c1f185ca03eadc40ad288d84425b54fd7d57@gmail.com>
| Python | apache-2.0 | kevinconway/pycc,kevinconway/pycc | ---
+++
@@ -12,7 +12,7 @@
setup(
name='pycc',
- version='1.0.0',
+ version='2.0.0',
url='https://github.com/kevinconway/pycc',
license=license,
description='Python code optimizer.', |
5c53a20363786bdecc4414631ad776abb1dd72d1 | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
install_requires=[
],
test_suite="tests")
| #!/usr/bin/env python
from setuptools import setup
import argparse_addons
setup(name='argparse_addons',
version=argparse_addons.__version__,
description=('Additional argparse types and actions.'),
long_description=open('README.rst', 'r').read(),
author='Erik Moqvist',
author_email='erik.moqvist@gmail.com',
license='MIT',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
python_requires=['>=3.6'],
install_requires=[
],
test_suite="tests")
| Document 3.6+ support in classifiers and python_requires | Document 3.6+ support in classifiers and python_requires | Python | mit | eerimoq/argparse_addons | ---
+++
@@ -13,10 +13,15 @@
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
],
keywords=['argparse'],
url='https://github.com/eerimoq/argparse_addons',
py_modules=['argparse_addons'],
+ python_requires=['>=3.6'],
install_requires=[
],
test_suite="tests") |
83b7b66c2558f292ba686583388065b6b2da25cb | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='ppci',
description="Pure python compiler infrastructure",
scripts=["bin/zcc.py", "bin/st-flash.py", 'bin/hexutil.py'],
version='0.0.1',
author='Windel Bouwman',
include_package_data=True,
packages=find_packages(exclude=["*.test.*", "test"]),
package_data = {'': ['*.grammar', "*.brg", "*.sled"]},
url='https://bitbucket.org/windel/ppci',
license='license.txt',
test_suite="test"
)
| from setuptools import setup, find_packages
setup(
name='ppci',
description="Pure python compiler infrastructure",
scripts=["bin/zcc.py", "bin/st-flash.py", 'bin/hexutil.py'],
version='0.0.1',
author='Windel Bouwman',
include_package_data=True,
packages=find_packages(exclude=["*.test.*", "test"]),
package_data = {'': ['*.grammar', "*.brg", "*.sled", "*.rst"]},
url='https://bitbucket.org/windel/ppci',
license='license.txt',
test_suite="test"
)
| Add doc files for packages | Add doc files for packages
| Python | bsd-2-clause | windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror,windelbouwman/ppci-mirror | ---
+++
@@ -8,7 +8,7 @@
author='Windel Bouwman',
include_package_data=True,
packages=find_packages(exclude=["*.test.*", "test"]),
- package_data = {'': ['*.grammar', "*.brg", "*.sled"]},
+ package_data = {'': ['*.grammar', "*.brg", "*.sled", "*.rst"]},
url='https://bitbucket.org/windel/ppci',
license='license.txt',
test_suite="test" |
7c9dac667c47d41c2da52ff993967f27016f6ba9 | profile_benchmark.py | profile_benchmark.py | # Profile the basic test execution
from pyresttest import resttest
from pyresttest.benchmarks import Benchmark
import cProfile
test = Benchmark()
test.warmup_runs = 0
test.benchmark_runs = 1000
test.raw_metrics = set()
test.metrics = {'total_time'}
test.aggregated_metrics = {'total_time': ['total','mean']}
test.url = 'http://localhost:8000/api/person/'
test.name = 'Basic GET'
cProfile.run('resttest.run_benchmark(test)', sort='cumtime')
| # Profile the basic test execution
from pyresttest import resttest
from pyresttest.benchmarks import Benchmark
from pyresttest.binding import Context
from pyresttest.contenthandling import ContentHandler
from pyresttest.generators import factory_generate_ids
import cProfile
test = Benchmark()
test.warmup_runs = 0
test.benchmark_runs = 1000
test.raw_metrics = set()
test.metrics = {'total_time'}
test.aggregated_metrics = {'total_time': ['total','mean']}
# Basic get test
test.url = 'http://localhost:8000/api/person/'
test.name = 'Basic GET'
print 'Basic GET test'
#cProfile.run('resttest.run_benchmark(test)', sort='cumtime')
# Test a generator PUT method
test.method = 'PUT'
test.set_url('http://localhost:8000/api/person/$id/', isTemplate=True)
test.headers = {'Content-Type': 'application/json'}
handler = ContentHandler()
handler.setup('{"first_name": "Gaius","id": "$id","last_name": "Baltar","login": "$id"}',
is_template_content=True)
test.body = handler
context = Context()
context.add_generator('gen', factory_generate_ids(starting_id=10)())
test.generator_binds = {'id':'gen'}
print 'Running templated PUT test'
cProfile.run('resttest.run_benchmark(test, context=context)', sort='cumtime') | Add profiler benchmark for generator functionality | Add profiler benchmark for generator functionality
| Python | apache-2.0 | MorrisJobke/pyresttest,alazaro/pyresttest,suvarnaraju/pyresttest,sunyanhui/pyresttest,wirewit/pyresttest,holdenweb/pyresttest,suvarnaraju/pyresttest,janusnic/pyresttest,MorrisJobke/pyresttest,alazaro/pyresttest,wirewit/pyresttest,sunyanhui/pyresttest,svanoort/pyresttest,netjunki/pyresttest,janusnic/pyresttest,TimYi/pyresttest,holdenweb/pyresttest,svanoort/pyresttest,netjunki/pyresttest,satish-suradkar/pyresttest,TimYi/pyresttest,satish-suradkar/pyresttest | ---
+++
@@ -2,6 +2,10 @@
from pyresttest import resttest
from pyresttest.benchmarks import Benchmark
+from pyresttest.binding import Context
+from pyresttest.contenthandling import ContentHandler
+from pyresttest.generators import factory_generate_ids
+
import cProfile
test = Benchmark()
@@ -10,8 +14,24 @@
test.raw_metrics = set()
test.metrics = {'total_time'}
test.aggregated_metrics = {'total_time': ['total','mean']}
+
+# Basic get test
test.url = 'http://localhost:8000/api/person/'
test.name = 'Basic GET'
+print 'Basic GET test'
+#cProfile.run('resttest.run_benchmark(test)', sort='cumtime')
-cProfile.run('resttest.run_benchmark(test)', sort='cumtime')
+# Test a generator PUT method
+test.method = 'PUT'
+test.set_url('http://localhost:8000/api/person/$id/', isTemplate=True)
+test.headers = {'Content-Type': 'application/json'}
+handler = ContentHandler()
+handler.setup('{"first_name": "Gaius","id": "$id","last_name": "Baltar","login": "$id"}',
+ is_template_content=True)
+test.body = handler
+context = Context()
+context.add_generator('gen', factory_generate_ids(starting_id=10)())
+test.generator_binds = {'id':'gen'}
+print 'Running templated PUT test'
+cProfile.run('resttest.run_benchmark(test, context=context)', sort='cumtime') |
e3fbf8ca10f0ed522bd2e819479bd93243ec4990 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='validation',
url='https://github.com/JOIVY/validation',
version='0.0.1',
author='Ben Mather',
author_email='bwhmather@bwhmather.com',
maintainer='',
license='BSD',
description=(
"A library for runtime type checking and validation of python values"
),
long_description=__doc__,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[
],
packages=find_packages(),
package_data={
'': ['*.*'],
},
entry_points={
'console_scripts': [
],
},
test_suite='validation.tests.suite',
)
| from setuptools import setup, find_packages
setup(
name='validation',
url='https://github.com/JOIVY/validation',
version='0.0.1',
author='Ben Mather',
author_email='bwhmather@bwhmather.com',
maintainer='',
license='BSD',
description=(
"A library for runtime type checking and validation of python values"
),
long_description=__doc__,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=[
'six >= 1.10, < 2',
],
packages=find_packages(),
package_data={
'': ['*.*'],
},
entry_points={
'console_scripts': [
],
},
test_suite='validation.tests.suite',
)
| Add dependency on six to help support legacy python2 | Add dependency on six to help support legacy python2
| Python | apache-2.0 | JOIVY/validation | ---
+++
@@ -23,6 +23,7 @@
'Programming Language :: Python :: 3.5',
],
install_requires=[
+ 'six >= 1.10, < 2',
],
packages=find_packages(),
package_data={ |
1f272ede03be0a9953839b1d19fe8a61ebef488f | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.2.1',
'cvxopt==1.2.5.post1',
'statsmodels==0.12.2',
]
)
| #!/usr/bin/env python
from setuptools import setup
setup(name='l1',
version='0.1',
description='L1',
author='Bugra Akyildiz',
author_email='vbugra@gmail.com',
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.2.1',
'cvxopt==1.2.6',
'statsmodels==0.12.2',
]
)
| Bump cvxopt from 1.2.5.post1 to 1.2.6 | Bump cvxopt from 1.2.5.post1 to 1.2.6
Bumps [cvxopt](http://cvxopt.org) from 1.2.5.post1 to 1.2.6.
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com> | Python | apache-2.0 | bugra/l1 | ---
+++
@@ -10,7 +10,7 @@
url='bugra.github.io',
packages=['l1'],
install_requires=['pandas==1.2.1',
- 'cvxopt==1.2.5.post1',
+ 'cvxopt==1.2.6',
'statsmodels==0.12.2',
]
|
323d52928fa58196299e1dde0c5b3ef0d1451d9f | setup.py | setup.py | from setuptools import find_packages, setup
import sys
if 'install' in sys.argv:
import webbrowser
webbrowser.open('https://www.youtube.com/watch?v=NMZcwXh7HDA', new=2, autoraise=True)
setup(
name='rdalal',
version='1.0',
description='Install some sweet Rehan',
author='Will Kahn-Greene',
author_email='willkg@bluesock.org',
url='https://github.com/willkg/rdalal',
zip_safe=True,
packages=find_packages(),
entry_points="""
[console_scripts]
rdalal=rdalal.cmdline:run
""",
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
| from setuptools import find_packages, setup
import sys
if 'install' in sys.argv:
import webbrowser
webbrowser.open('https://www.youtube.com/watch?v=NMZcwXh7HDA', new=2, autoraise=True)
setup(
name='rdalal',
version='1.1',
description='Install some sweet Rehan',
author='Will Kahn-Greene',
author_email='willkg@bluesock.org',
url='https://github.com/willkg/rdalal',
zip_safe=True,
packages=find_packages(),
entry_points="""
[console_scripts]
rdalal=rdalal.cmdline:run
""",
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
| Set to 1.1 for new release | Set to 1.1 for new release
| Python | bsd-3-clause | willkg/rdalal | ---
+++
@@ -9,7 +9,7 @@
setup(
name='rdalal',
- version='1.0',
+ version='1.1',
description='Install some sweet Rehan',
author='Will Kahn-Greene',
author_email='willkg@bluesock.org', |
ae09eb021d6b119a2fdb9a82852ab424acf8f409 | setup.py | setup.py | from setuptools import setup
cli_tools = ["spritecss = spritecss.main:main"]
setup(name="spritecss", version="0.5", url="http://yostudios.se/",
author="Yo Studios AB", author_email="opensource@yostudios.se",
description="Do-what-I-mean automatic CSS spritemapper",
license="MIT/X11",
packages=["spritecss", "spritecss.css", "spritecss.packing"],
test_suite="nose.collector", tests_require=["nose"],
entry_points={"console_scripts": cli_tools})
| from setuptools import setup
cli_tools = ["spritemapper = spritecss.main:main"]
setup(name="spritemapper", version="0.5", url="http://yostudios.se/",
author="Yo Studios AB", author_email="opensource@yostudios.se",
description="Do-what-I-mean automatic CSS spritemapper",
license="MIT/X11",
packages=["spritecss", "spritecss.css", "spritecss.packing"],
test_suite="nose.collector", tests_require=["nose"],
entry_points={"console_scripts": cli_tools})
| Rename distribution, binary to spritemapper | Rename distribution, binary to spritemapper | Python | mit | wpj-cz/Spritemapper,yostudios/Spritemapper,yostudios/Spritemapper,wpj-cz/Spritemapper,wpj-cz/Spritemapper,wpj-cz/Spritemapper,yostudios/Spritemapper | ---
+++
@@ -1,8 +1,8 @@
from setuptools import setup
-cli_tools = ["spritecss = spritecss.main:main"]
+cli_tools = ["spritemapper = spritecss.main:main"]
-setup(name="spritecss", version="0.5", url="http://yostudios.se/",
+setup(name="spritemapper", version="0.5", url="http://yostudios.se/",
author="Yo Studios AB", author_email="opensource@yostudios.se",
description="Do-what-I-mean automatic CSS spritemapper",
license="MIT/X11", |
3154a8bd09e8d445de6893862c646c9fb983021d | setup.py | setup.py | #!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='django-afip',
description='AFIP integration for django',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/django-afip',
license='ISC',
packages=find_packages(),
include_package_data=True,
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').read().splitlines() + [
'Django>=1.8.4'
],
extras_require={
'docs': ['Sphinx', 'sphinx-autobuild']
},
use_scm_version={
'version_scheme': 'post-release',
'write_to': 'django_afip/version.py',
},
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| #!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name='django-afip',
description='AFIP integration for django',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
url='https://gitlab.com/hobarrera/django-afip',
license='ISC',
packages=find_packages(),
include_package_data=True,
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').read().splitlines() + [
'Django>=1.8.4'
],
extras_require={
'docs': ['Sphinx', 'sphinx-autobuild']
},
use_scm_version={
'version_scheme': 'post-release',
'write_to': 'django_afip/version.py',
},
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| Add classifiers for py35 and py36 | Add classifiers for py35 and py36
| Python | isc | hobarrera/django-afip,hobarrera/django-afip | ---
+++
@@ -34,6 +34,8 @@
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
] |
fe7d10780cd3c98277c64621c07daab0837b6722 | setup.py | setup.py | from setuptools import setup, find_packages
from lighthouse import __version__
classifiers = []
with open("classifiers.txt") as fd:
classifiers = fd.readlines()
setup(
name="lighthouse",
version=__version__,
description="Service discovery tool focused on ease-of-use and resiliency",
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/lighthouse",
license="MIT",
classifiers=classifiers,
packages=find_packages(exclude=["tests", "tests.*"]),
include_package_data=True,
package_data={
"lighthouse": ["haproxy/*.json"],
},
install_requires=[
"watchdog",
"pyyaml",
"kazoo",
"six",
],
extras_require={
"redis": [],
},
entry_points={
"console_scripts": [
"lighthouse-reporter = lighthouse.scripts.reporter:run",
"lighthouse-writer = lighthouse.scripts.writer:run"
],
"lighthouse.balancers": [
"haproxy = lighthouse.haproxy.balancer:HAProxy",
],
"lighthouse.discovery": [
"zookeeper = lighthouse.zookeeper:ZookeeperDiscovery",
],
"lighthouse.checks": [
"http = lighthouse.checks.http:HTTPCheck",
"tcp = lighthouse.checks.tcp:TCPCheck",
"redis = lighthouse.redis.check:RedisCheck [redis]",
]
},
tests_require=[
"nose",
"mock",
"coverage",
"flake8",
],
)
| from setuptools import setup, find_packages
from lighthouse import __version__
classifiers = []
with open("classifiers.txt") as fd:
classifiers = fd.readlines()
setup(
name="lighthouse",
version=__version__,
description="Service discovery tool focused on ease-of-use and resiliency",
author="William Glass",
author_email="william.glass@gmail.com",
url="http://github.com/wglass/lighthouse",
license="MIT",
classifiers=classifiers,
packages=find_packages(exclude=["tests", "tests.*"]),
include_package_data=True,
package_data={
"lighthouse": ["haproxy/*.json"],
},
install_requires=[
"watchdog",
"pyyaml",
"kazoo",
"six",
"futures",
],
extras_require={
"redis": [],
},
entry_points={
"console_scripts": [
"lighthouse-reporter = lighthouse.scripts.reporter:run",
"lighthouse-writer = lighthouse.scripts.writer:run"
],
"lighthouse.balancers": [
"haproxy = lighthouse.haproxy.balancer:HAProxy",
],
"lighthouse.discovery": [
"zookeeper = lighthouse.zookeeper:ZookeeperDiscovery",
],
"lighthouse.checks": [
"http = lighthouse.checks.http:HTTPCheck",
"tcp = lighthouse.checks.tcp:TCPCheck",
"redis = lighthouse.redis.check:RedisCheck [redis]",
]
},
tests_require=[
"nose",
"mock",
"coverage",
"flake8",
],
)
| Add the futures lib backport as a dependency. | Add the futures lib backport as a dependency.
| Python | apache-2.0 | wglass/lighthouse | ---
+++
@@ -27,6 +27,7 @@
"pyyaml",
"kazoo",
"six",
+ "futures",
],
extras_require={
"redis": [], |
4c75f1ecb9a0a6533b02834ec2a0db91472f7f32 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='jupyterhub-kubespawner',
version='0.5.1',
install_requires=[
'jupyterhub',
'pyyaml',
'kubernetes==2.*',
'escapism',
],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
description='JupyterHub Spawner targetting Kubernetes',
url='http://github.com/jupyterhub/kubespawner',
author='Yuvi Panda',
author_email='yuvipanda@gmail.com',
license='BSD',
packages=find_packages(),
)
| from setuptools import setup, find_packages
setup(
name='jupyterhub-kubespawner',
version='0.5.1',
install_requires=[
'jupyterhub',
'pyyaml',
'kubernetes==2.*',
'escapism',
'jupyter',
],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
description='JupyterHub Spawner targeting Kubernetes',
url='http://github.com/jupyterhub/kubespawner',
author='Yuvi Panda',
author_email='yuvipanda@gmail.com',
license='BSD',
packages=find_packages(),
)
| Add jupyter to satisfy jupyterhub dependency | Add jupyter to satisfy jupyterhub dependency
| Python | bsd-3-clause | jupyterhub/kubespawner,ktong/kubespawner,yuvipanda/jupyterhub-kubernetes-spawner | ---
+++
@@ -8,10 +8,11 @@
'pyyaml',
'kubernetes==2.*',
'escapism',
+ 'jupyter',
],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
- description='JupyterHub Spawner targetting Kubernetes',
+ description='JupyterHub Spawner targeting Kubernetes',
url='http://github.com/jupyterhub/kubespawner',
author='Yuvi Panda',
author_email='yuvipanda@gmail.com', |
92af23a3677d57364ec3d7bf6aaf8d320dd4fe94 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name="vxtwinio",
version="0.0.1a",
url="https://github.com/praekelt/vumi-twilio-api",
license="BSD",
description="Provides a REST API to Vumi that emulates the Twilio API",
long_description=open("README.rst", "r").read(),
author="Praekelt Foundation",
author_email="dev@praekeltfoundation.org",
packages=find_packages(),
scripts=[],
install_requires=[],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
| from setuptools import setup, find_packages
setup(
name="vxtwinio",
version="0.0.1a",
url="https://github.com/praekelt/vumi-twilio-api",
license="BSD",
description="Provides a REST API to Vumi that emulates the Twilio API",
long_description=open("README.rst", "r").read(),
author="Praekelt Foundation",
author_email="dev@praekeltfoundation.org",
packages=find_packages(),
scripts=[],
install_requires=[
'vumi',
'klein',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
| Add vumi and klein to requirements | Add vumi and klein to requirements
| Python | bsd-3-clause | praekelt/vumi-twilio-api | ---
+++
@@ -11,7 +11,10 @@
author_email="dev@praekeltfoundation.org",
packages=find_packages(),
scripts=[],
- install_requires=[],
+ install_requires=[
+ 'vumi',
+ 'klein',
+ ],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers', |
f176a726debd870fa2f75bdf7ccf8229bab95da4 | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
import sys
import os
import glob
setup(name = "scilifelab",
version = "0.2.2",
author = "Science for Life Laboratory",
author_email = "genomics_support@scilifelab.se",
description = "Useful scripts for use at SciLifeLab",
license = "MIT",
scripts = glob.glob('scripts/*.py') + glob.glob('scripts/bcbb_helpers/*.py') + ['scripts/pm'],
install_requires = [
"bcbio-nextgen >= 0.2",
"drmaa >= 0.5",
"sphinx >= 1.1.3",
"couchdb >= 0.8",
"reportlab >= 2.5",
"cement >= 2.0.2",
"mock",
"PIL",
"pyPdf",
"logbook >= 0.4",
# pandas screws up installation; tries to look for local site
# packages and not in virtualenv
#"pandas >= 0.9",
"biopython",
"rst2pdf",
#"psutil",
],
test_suite = 'nose.collector',
packages=find_packages(exclude=['tests']),
package_data = {'scilifelab':[
'data/grf/*',
'data/templates/*.*',
'data/templates/rst/*',
]}
)
os.system("git rev-parse --short --verify HEAD > ~/.scilifelab_version")
| #!/usr/bin/env python
from setuptools import setup, find_packages
import sys
import os
import glob
setup(name = "scilifelab",
version = "0.2.2",
author = "Science for Life Laboratory",
author_email = "genomics_support@scilifelab.se",
description = "Useful scripts for use at SciLifeLab",
license = "MIT",
scripts = glob.glob('scripts/*.py') + glob.glob('scripts/bcbb_helpers/*.py') + ['scripts/pm'],
install_requires = [
"bcbio-nextgen >= 0.2",
"drmaa >= 0.5",
"sphinx >= 1.1.3",
"couchdb >= 0.8",
"reportlab >= 2.5",
"cement >= 2.0.2",
"mock",
"PIL",
"pyPdf",
"logbook >= 0.4",
# pandas screws up installation; tries to look for local site
# packages and not in virtualenv
#"pandas >= 0.9",
"biopython",
"rst2pdf",
#"psutil",
],
test_suite = 'nose.collector',
packages=find_packages(exclude=['tests']),
package_data = {'scilifelab':[
'data/grf/*',
'data/templates/*.mako',
'data/templates/rst/*',
]}
)
os.system("git rev-parse --short --verify HEAD > ~/.scilifelab_version")
| Set template wildcard to *.mako | Set template wildcard to *.mako
| Python | mit | senthil10/scilifelab,SciLifeLab/scilifelab,SciLifeLab/scilifelab,SciLifeLab/scilifelab,kate-v-stepanova/scilifelab,jun-wan/scilifelab,senthil10/scilifelab,kate-v-stepanova/scilifelab,jun-wan/scilifelab,SciLifeLab/scilifelab,kate-v-stepanova/scilifelab,jun-wan/scilifelab,senthil10/scilifelab,jun-wan/scilifelab,kate-v-stepanova/scilifelab,senthil10/scilifelab | ---
+++
@@ -33,7 +33,7 @@
packages=find_packages(exclude=['tests']),
package_data = {'scilifelab':[
'data/grf/*',
- 'data/templates/*.*',
+ 'data/templates/*.mako',
'data/templates/rst/*',
]}
) |
e211c9cb76a2f20e8b00a88e148850829ad86ec5 | setup.py | setup.py | from distutils.core import setup
from setuptools import setup, find_packages
setup(
name = "django-webmaster-verification",
version = "0.1",
author = "Nicolas Kuttler",
author_email = "pypi@nicolaskuttler.com",
description = "Webmaster tools verification for Django",
long_description = open("README.rst").read(),
license = "BSD",
url = "http://github.com/nkuttler/django-webmaster-verification",
packages = find_packages(),
classifiers = [
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Framework :: Django",
],
)
| try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = "django-webmaster-verification",
version = "0.1",
author = "Nicolas Kuttler",
author_email = "pypi@nicolaskuttler.com",
description = "Webmaster tools verification for Django",
long_description = open("README.rst").read(),
license = "BSD",
url = "http://github.com/nkuttler/django-webmaster-verification",
packages = ['webmaster_verification'],
classifiers = [
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Framework :: Django",
],
zip_safe = True,
)
| Update according to the django wiki | Update according to the django wiki
https://code.djangoproject.com/wiki/DosAndDontsForApplicationWriters
| Python | bsd-3-clause | nkuttler/django-webmaster-verification,nkuttler/django-webmaster-verification | ---
+++
@@ -1,5 +1,7 @@
-from distutils.core import setup
-from setuptools import setup, find_packages
+try:
+ from setuptools import setup
+except ImportError:
+ from distutils.core import setup
setup(
name = "django-webmaster-verification",
@@ -10,7 +12,7 @@
long_description = open("README.rst").read(),
license = "BSD",
url = "http://github.com/nkuttler/django-webmaster-verification",
- packages = find_packages(),
+ packages = ['webmaster_verification'],
classifiers = [
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
@@ -20,4 +22,5 @@
"Programming Language :: Python",
"Framework :: Django",
],
+ zip_safe = True,
) |
26e30e7f283a9ffb0a9a9a3f1a6f11f11bbab1e1 | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.1',
description='This python library provides an easy interface to the Bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
import os
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README')) as f:
README = f.read()
requires = []
setup(name='python-bitcoinlib',
version='0.2.2-SNAPSHOT',
description='This python library provides an easy interface to the Bitcoin data structures and protocol.',
long_description=README,
classifiers=[
"Programming Language :: Python",
],
url='https://github.com/petertodd/python-bitcoinlib',
keywords='bitcoin',
packages=find_packages(),
zip_safe=False,
install_requires=requires,
test_suite="bitcoin.tests"
)
| Reset version for future development | Reset version for future development
| Python | mit | petertodd/dust-b-gone | ---
+++
@@ -10,7 +10,7 @@
requires = []
setup(name='python-bitcoinlib',
- version='0.2.1',
+ version='0.2.2-SNAPSHOT',
description='This python library provides an easy interface to the Bitcoin data structures and protocol.',
long_description=README,
classifiers=[ |
8a2fa728cc62b61eb83f180fb73784d06482ec2e | setup.py | setup.py | import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
url='https://github.com/auth0/auth0-python',
)
| import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
| Update supported versions to match supported CPython versions | Update supported versions to match supported CPython versions
| Python | mit | auth0/auth0-python,auth0/auth0-python | ---
+++
@@ -29,12 +29,13 @@
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.2',
- 'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
) |
9c8289f531abfd75a1c378f53ea58136e29e662c | setup.py | setup.py |
from setuptools import setup
setup(
name='exp_sdk',
packages= ['exp_sdk'],
version='1.0.0a2',
description='EXP Python SDK',
author='Scala',
author_email='james.dalessio@scala.com',
url='https://github.com/scalainc/exp-python2-sdk',
download_url='https://github.com/scalainc/exp-python2-sdk/tarball/1.0.0a2',
install_requires=["requests", "socketIO_client"],
license='MIT',
keywords=['scala', 'exp', 'sdk', 'signage'],
classifiers=[
'Programming Language :: Python :: 2'
]
)
|
from setuptools import setup
setup(
name='exp_sdk',
packages= ['exp_sdk'],
version='1.0.0rc1',
description='EXP Python SDK',
author='Scala',
author_email='james.dalessio@scala.com',
url='https://github.com/scalainc/exp-python2-sdk',
download_url='https://github.com/scalainc/exp-python2-sdk/tarball/1.0.0rc1',
install_requires=["requests", "socketIO_client"],
license='MIT',
keywords=['scala', 'exp', 'sdk', 'signage'],
classifiers=[
'Programming Language :: Python :: 2'
]
)
| Move to rc1. Figuring this out. | Move to rc1. Figuring this out.
| Python | mit | ScalaInc/exp-python2-sdk,ScalaInc/exp-python2-sdk | ---
+++
@@ -4,12 +4,12 @@
setup(
name='exp_sdk',
packages= ['exp_sdk'],
- version='1.0.0a2',
+ version='1.0.0rc1',
description='EXP Python SDK',
author='Scala',
author_email='james.dalessio@scala.com',
url='https://github.com/scalainc/exp-python2-sdk',
- download_url='https://github.com/scalainc/exp-python2-sdk/tarball/1.0.0a2',
+ download_url='https://github.com/scalainc/exp-python2-sdk/tarball/1.0.0rc1',
install_requires=["requests", "socketIO_client"],
license='MIT',
keywords=['scala', 'exp', 'sdk', 'signage'], |
8bc2395dd1814f0fa02dc8c48db1cae3f340c6a6 | setup.py | setup.py | from setuptools import setup, find_packages
install_requires = [
'dill==0.2.5',
'easydict==1.6',
'h5py==2.6.0',
'jsonpickle==0.9.3',
'Keras==1.2.0',
'nflgame==1.2.20',
'numpy==1.11.2',
'pandas==0.19.1',
'scikit-learn==0.18.1',
'scipy==0.18.1',
'tensorflow==0.12.0rc1',
'Theano==0.8.2',
'tabulate==0.7.7',
]
with open('README.md', 'r') as f:
readme = f.read()
setup(
name="wincast",
version='0.0.8',
url='https://github.com/kahnjw/wincast',
author_email='jarrod.kahn+wincast@gmail.com',
long_description=readme,
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=install_requires,
package_data={
'wincast': ['models/wincast.model.h5', 'models/wincast.scaler.pkl']
}
)
| from setuptools import setup, find_packages
install_requires = [
'dill==0.2.5',
'easydict==1.6',
'h5py==2.6.0',
'jsonpickle==0.9.3',
'Keras==1.2.0',
'nflgame==1.2.20',
'numpy==1.11.2',
'pandas==0.19.1',
'scikit-learn==0.18.1',
'scipy==0.18.1',
'tensorflow==0.12.0rc1',
'Theano==0.8.2',
'tabulate==0.7.7',
]
with open('README.md', 'r') as f:
readme = f.read()
setup(
name="wincast",
version='0.0.8',
url='https://github.com/kahnjw/wincast',
author_email='jarrod.kahn+wincast@gmail.com',
long_description=readme,
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=install_requires,
package_data={
'wincast': ['models/wincast.clf.pkl', 'models/wincast.scaler.pkl']
}
)
| Add classifier pkl to package data | Add classifier pkl to package data
| Python | mit | kahnjw/wincast | ---
+++
@@ -30,6 +30,6 @@
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=install_requires,
package_data={
- 'wincast': ['models/wincast.model.h5', 'models/wincast.scaler.pkl']
+ 'wincast': ['models/wincast.clf.pkl', 'models/wincast.scaler.pkl']
}
) |
ebb742165ada521b7d883d3d228d69edc9b1fee7 | odictliteral.py | odictliteral.py | try:
from collections import OrderedDict
except:
from ordereddict import OrderedDict
try:
from reprlib import recursive_repr
except:
# don't cope with recursive repr calls in py2
def recursive_repr(fillvalue='...'): return (lambda f: f)
try:
from collections.abc import Iterable
except:
Iterable = tuple
__all__ = ["odict"]
__version__ = '1.0.0'
class odictType(type):
syntax_error = SyntaxError("Allowed syntax: odict[<k>: <v>(, <k>: <v>...)]")
def __getitem__(self, keys):
if isinstance(keys, slice):
keys = (keys,)
if not isinstance(keys, Iterable):
raise self.syntax_error
od = self()
for k in keys:
if not isinstance(k, slice) or k.step is not None:
raise self.syntax_error
od[k.start] = k.stop
return od
@recursive_repr(fillvalue="odict[...]")
def odict_repr(self):
if len(self) == 0:
return "odict()"
else:
return "odict[%s]" % (", ".join("%s: %s" % (k,v) for k,v in self.items()),)
odict = odictType(str('odict'), (OrderedDict,), {"__repr__": odict_repr})
| try:
from collections import OrderedDict
except:
from ordereddict import OrderedDict
try:
from reprlib import recursive_repr
except:
# don't cope with recursive repr calls in py2
def recursive_repr(fillvalue='...'): return (lambda f: f)
try:
from collections.abc import Iterable
except:
Iterable = tuple
__all__ = ["odict"]
__version__ = '1.0.0'
class odictType(type):
syntax_error = SyntaxError("Allowed syntax: odict[<k>: <v>(, <k>: <v>...)]")
def __getitem__(self, keys):
if isinstance(keys, slice):
keys = (keys,)
if not isinstance(keys, Iterable):
raise self.syntax_error
od = self()
for k in keys:
if not isinstance(k, slice) or k.step is not None:
raise self.syntax_error
od[k.start] = k.stop
return od
@recursive_repr(fillvalue="odict[...]")
def odict_repr(self):
if len(self) == 0:
return "odict()"
else:
return "odict[%s]" % (", ".join("%r: %r" % (k,v) for k,v in self.items()),)
odict = odictType(str('odict'), (OrderedDict,), {"__repr__": odict_repr})
| Use repr instead of str for odict repr | Use repr instead of str for odict repr | Python | unlicense | ajtowns/odictliteral | ---
+++
@@ -37,6 +37,6 @@
if len(self) == 0:
return "odict()"
else:
- return "odict[%s]" % (", ".join("%s: %s" % (k,v) for k,v in self.items()),)
+ return "odict[%s]" % (", ".join("%r: %r" % (k,v) for k,v in self.items()),)
odict = odictType(str('odict'), (OrderedDict,), {"__repr__": odict_repr}) |
bc65fb47a777c0ef501bb492d8003d01ce22b233 | libpam/utc-time/utc-time.py | libpam/utc-time/utc-time.py | #!/usr/bin/env python
import time
print 'Content-Type: text/javascript'
print ''
print 'var timeskew = new Date().getTime() - ' + str(time.time()*1000) + ';'
| #!/usr/bin/env python
import time
t = time.time()
u = time.gmtime(t)
s = time.strftime('%a, %e %b %Y %T GMT', u)
print 'Content-Type: text/javascript'
print 'Cache-Control: no-cache'
print 'Date: ' + s
print 'Expires: ' + s
print ''
print 'var timeskew = new Date().getTime() - ' + str(t*1000) + ';'
| Disable caching of stale time stamp information. | Disable caching of stale time stamp information.
| Python | apache-2.0 | fargly/google-authenticator,google/google-authenticator,fargly/google-authenticator,fargly/google-authenticator,google/google-authenticator | ---
+++
@@ -1,5 +1,11 @@
#!/usr/bin/env python
import time
+t = time.time()
+u = time.gmtime(t)
+s = time.strftime('%a, %e %b %Y %T GMT', u)
print 'Content-Type: text/javascript'
+print 'Cache-Control: no-cache'
+print 'Date: ' + s
+print 'Expires: ' + s
print ''
-print 'var timeskew = new Date().getTime() - ' + str(time.time()*1000) + ';'
+print 'var timeskew = new Date().getTime() - ' + str(t*1000) + ';' |
5a808dfa9ae8661382fbc00641562f2024931b57 | test/test_packages.py | test/test_packages.py | import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("python"),
("python-pip"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("vim-addon-manager"),
("vim-puppet"),
("vim-syntax-docker"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(Package, name):
assert Package(name).is_installed
| import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("python"),
("python-pip"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(Package, name):
assert Package(name).is_installed
| Update for vim plugin changes | Update for vim plugin changes
| Python | mit | wicksy/laptop-build,wicksy/laptop-build,wicksy/laptop-build,wicksy/laptop-build | ---
+++
@@ -36,9 +36,6 @@
("tree"),
("vagrant"),
("vim"),
- ("vim-addon-manager"),
- ("vim-puppet"),
- ("vim-syntax-docker"),
("virtualbox"),
("vlc"),
("wget"), |
51413c8cb63bfdc4fba3f0a144459b0082ce2bf2 | tests/basics/list1.py | tests/basics/list1.py | # basic list functionality
x = [1, 2, 3 * 4]
print(x)
x[0] = 4
print(x)
x[1] += -4
print(x)
x.append(5)
print(x)
f = x.append
f(4)
print(x)
x.extend([100, 200])
print(x)
x += [2, 1]
print(x)
print(x[1:])
print(x[:-1])
print(x[2:3])
| # basic list functionality
x = [1, 2, 3 * 4]
print(x)
x[0] = 4
print(x)
x[1] += -4
print(x)
x.append(5)
print(x)
f = x.append
f(4)
print(x)
x.extend([100, 200])
print(x)
x += [2, 1]
print(x)
print(x[1:])
print(x[:-1])
print(x[2:3])
try:
print(x[1.0])
except TypeError:
print("TypeError")
| Add test for implicit float to int conversion (not allowed!) | tests: Add test for implicit float to int conversion (not allowed!)
| Python | mit | pramasoul/micropython,dmazzella/micropython,HenrikSolver/micropython,turbinenreiter/micropython,stonegithubs/micropython,bvernoux/micropython,ernesto-g/micropython,ernesto-g/micropython,blazewicz/micropython,cloudformdesign/micropython,puuu/micropython,slzatz/micropython,omtinez/micropython,SHA2017-badge/micropython-esp32,AriZuu/micropython,HenrikSolver/micropython,ernesto-g/micropython,Vogtinator/micropython,mgyenik/micropython,MrSurly/micropython-esp32,Vogtinator/micropython,skybird6672/micropython,Vogtinator/micropython,jimkmc/micropython,heisewangluo/micropython,redbear/micropython,orionrobots/micropython,ericsnowcurrently/micropython,neilh10/micropython,trezor/micropython,lbattraw/micropython,jimkmc/micropython,praemdonck/micropython,warner83/micropython,noahwilliamsson/micropython,adafruit/circuitpython,feilongfl/micropython,tdautc19841202/micropython,methoxid/micropystat,mpalomer/micropython,neilh10/micropython,xhat/micropython,vitiral/micropython,dhylands/micropython,Timmenem/micropython,emfcamp/micropython,alex-robbins/micropython,aitjcize/micropython,oopy/micropython,chrisdearman/micropython,methoxid/micropystat,MrSurly/micropython,xuxiaoxin/micropython,selste/micropython,adafruit/micropython,martinribelotta/micropython,dinau/micropython,cwyark/micropython,methoxid/micropystat,ericsnowcurrently/micropython,mpalomer/micropython,dmazzella/micropython,paul-xxx/micropython,cnoviello/micropython,TDAbboud/micropython,martinribelotta/micropython,mianos/micropython,neilh10/micropython,lbattraw/micropython,orionrobots/micropython,cnoviello/micropython,ceramos/micropython,Peetz0r/micropython-esp32,neilh10/micropython,redbear/micropython,slzatz/micropython,MrSurly/micropython-esp32,cwyark/micropython,aitjcize/micropython,heisewangluo/micropython,torwag/micropython,mianos/micropython,HenrikSolver/micropython,trezor/micropython,tralamazza/micropython,MrSurly/micropython,ahotam/micropython,orionrobots/micropython,oopy/micropython,alex-robbins/micropytho
n,micropython/micropython-esp32,tdautc19841202/micropython,hiway/micropython,lowRISC/micropython,mianos/micropython,rubencabrera/micropython,orionrobots/micropython,chrisdearman/micropython,Timmenem/micropython,martinribelotta/micropython,PappaPeppar/micropython,ericsnowcurrently/micropython,hiway/micropython,PappaPeppar/micropython,TDAbboud/micropython,EcmaXp/micropython,mgyenik/micropython,matthewelse/micropython,HenrikSolver/micropython,drrk/micropython,cloudformdesign/micropython,hiway/micropython,vriera/micropython,pramasoul/micropython,praemdonck/micropython,PappaPeppar/micropython,mgyenik/micropython,suda/micropython,heisewangluo/micropython,kerneltask/micropython,martinribelotta/micropython,kerneltask/micropython,ceramos/micropython,warner83/micropython,galenhz/micropython,jimkmc/micropython,hosaka/micropython,paul-xxx/micropython,selste/micropython,ryannathans/micropython,MrSurly/micropython-esp32,emfcamp/micropython,jlillest/micropython,ernesto-g/micropython,ruffy91/micropython,turbinenreiter/micropython,pfalcon/micropython,supergis/micropython,redbear/micropython,kostyll/micropython,danicampora/micropython,dmazzella/micropython,danicampora/micropython,vitiral/micropython,henriknelson/micropython,misterdanb/micropython,mianos/micropython,MrSurly/micropython,Timmenem/micropython,SHA2017-badge/micropython-esp32,HenrikSolver/micropython,supergis/micropython,mpalomer/micropython,pfalcon/micropython,trezor/micropython,trezor/micropython,mgyenik/micropython,KISSMonX/micropython,infinnovation/micropython,rubencabrera/micropython,bvernoux/micropython,omtinez/micropython,lbattraw/micropython,blmorris/micropython,orionrobots/micropython,danicampora/micropython,dhylands/micropython,noahchense/micropython,cnoviello/micropython,selste/micropython,feilongfl/micropython,ryannathans/micropython,noahchense/micropython,micropython/micropython-esp32,cnoviello/micropython,SungEun-Steve-Kim/test-mp,puuu/micropython,aitjcize/micropython,chrisdearman/micropython,danicampora/micr
opython,misterdanb/micropython,tdautc19841202/micropython,hosaka/micropython,noahwilliamsson/micropython,drrk/micropython,matthewelse/micropython,ChuckM/micropython,warner83/micropython,matthewelse/micropython,toolmacher/micropython,blazewicz/micropython,cwyark/micropython,kostyll/micropython,tuc-osg/micropython,deshipu/micropython,AriZuu/micropython,Vogtinator/micropython,turbinenreiter/micropython,mhoffma/micropython,AriZuu/micropython,jlillest/micropython,jmarcelino/pycom-micropython,oopy/micropython,misterdanb/micropython,SHA2017-badge/micropython-esp32,vriera/micropython,pozetroninc/micropython,jmarcelino/pycom-micropython,vriera/micropython,xyb/micropython,cloudformdesign/micropython,suda/micropython,ganshun666/micropython,matthewelse/micropython,lbattraw/micropython,selste/micropython,henriknelson/micropython,vriera/micropython,warner83/micropython,paul-xxx/micropython,alex-march/micropython,alex-robbins/micropython,vitiral/micropython,Peetz0r/micropython-esp32,vitiral/micropython,cloudformdesign/micropython,mhoffma/micropython,emfcamp/micropython,tuc-osg/micropython,mpalomer/micropython,emfcamp/micropython,adafruit/micropython,rubencabrera/micropython,jmarcelino/pycom-micropython,aethaniel/micropython,cwyark/micropython,xuxiaoxin/micropython,EcmaXp/micropython,xyb/micropython,ganshun666/micropython,EcmaXp/micropython,pozetroninc/micropython,aethaniel/micropython,alex-march/micropython,lowRISC/micropython,tralamazza/micropython,dxxb/micropython,torwag/micropython,dhylands/micropython,dxxb/micropython,dmazzella/micropython,selste/micropython,blmorris/micropython,tuc-osg/micropython,adafruit/micropython,misterdanb/micropython,puuu/micropython,ceramos/micropython,galenhz/micropython,turbinenreiter/micropython,dxxb/micropython,stonegithubs/micropython,dhylands/micropython,SungEun-Steve-Kim/test-mp,galenhz/micropython,micropython/micropython-esp32,KISSMonX/micropython,xuxiaoxin/micropython,vitiral/micropython,hosaka/micropython,kerneltask/micropython,methoxid/micr
opystat,ChuckM/micropython,ruffy91/micropython,dinau/micropython,xuxiaoxin/micropython,xhat/micropython,methoxid/micropystat,adafruit/micropython,cloudformdesign/micropython,swegener/micropython,pramasoul/micropython,lowRISC/micropython,KISSMonX/micropython,ChuckM/micropython,pramasoul/micropython,adafruit/micropython,kerneltask/micropython,KISSMonX/micropython,xyb/micropython,drrk/micropython,alex-march/micropython,adamkh/micropython,tralamazza/micropython,mianos/micropython,ahotam/micropython,ruffy91/micropython,redbear/micropython,noahwilliamsson/micropython,MrSurly/micropython,noahchense/micropython,ryannathans/micropython,torwag/micropython,xhat/micropython,ganshun666/micropython,jimkmc/micropython,hiway/micropython,SungEun-Steve-Kim/test-mp,aitjcize/micropython,stonegithubs/micropython,TDAbboud/micropython,feilongfl/micropython,supergis/micropython,pfalcon/micropython,aethaniel/micropython,omtinez/micropython,henriknelson/micropython,PappaPeppar/micropython,swegener/micropython,tobbad/micropython,adafruit/circuitpython,tobbad/micropython,toolmacher/micropython,infinnovation/micropython,pramasoul/micropython,PappaPeppar/micropython,toolmacher/micropython,noahchense/micropython,mhoffma/micropython,suda/micropython,adamkh/micropython,hosaka/micropython,danicampora/micropython,chrisdearman/micropython,misterdanb/micropython,tdautc19841202/micropython,rubencabrera/micropython,praemdonck/micropython,chrisdearman/micropython,Peetz0r/micropython-esp32,drrk/micropython,adafruit/circuitpython,firstval/micropython,tobbad/micropython,Timmenem/micropython,dinau/micropython,dhylands/micropython,martinribelotta/micropython,paul-xxx/micropython,firstval/micropython,dinau/micropython,tuc-osg/micropython,pfalcon/micropython,noahwilliamsson/micropython,drrk/micropython,jmarcelino/pycom-micropython,mhoffma/micropython,torwag/micropython,kerneltask/micropython,deshipu/micropython,swegener/micropython,xyb/micropython,blazewicz/micropython,praemdonck/micropython,slzatz/micropython,u
topiaprince/micropython,slzatz/micropython,supergis/micropython,heisewangluo/micropython,cwyark/micropython,TDAbboud/micropython,infinnovation/micropython,jlillest/micropython,lowRISC/micropython,supergis/micropython,swegener/micropython,ChuckM/micropython,alex-march/micropython,toolmacher/micropython,noahwilliamsson/micropython,lowRISC/micropython,suda/micropython,bvernoux/micropython,deshipu/micropython,xhat/micropython,skybird6672/micropython,redbear/micropython,utopiaprince/micropython,stonegithubs/micropython,pozetroninc/micropython,toolmacher/micropython,AriZuu/micropython,tdautc19841202/micropython,adafruit/circuitpython,infinnovation/micropython,AriZuu/micropython,kostyll/micropython,alex-robbins/micropython,galenhz/micropython,MrSurly/micropython,bvernoux/micropython,dxxb/micropython,pozetroninc/micropython,emfcamp/micropython,lbattraw/micropython,pfalcon/micropython,adafruit/circuitpython,matthewelse/micropython,deshipu/micropython,firstval/micropython,adamkh/micropython,puuu/micropython,KISSMonX/micropython,blazewicz/micropython,mpalomer/micropython,omtinez/micropython,blmorris/micropython,skybird6672/micropython,utopiaprince/micropython,xyb/micropython,aethaniel/micropython,oopy/micropython,cnoviello/micropython,adamkh/micropython,matthewelse/micropython,xhat/micropython,utopiaprince/micropython,Timmenem/micropython,deshipu/micropython,bvernoux/micropython,ryannathans/micropython,skybird6672/micropython,ericsnowcurrently/micropython,SungEun-Steve-Kim/test-mp,kostyll/micropython,tobbad/micropython,Peetz0r/micropython-esp32,ceramos/micropython,jlillest/micropython,SungEun-Steve-Kim/test-mp,blmorris/micropython,SHA2017-badge/micropython-esp32,mgyenik/micropython,jlillest/micropython,TDAbboud/micropython,adafruit/circuitpython,ruffy91/micropython,firstval/micropython,praemdonck/micropython,dinau/micropython,paul-xxx/micropython,MrSurly/micropython-esp32,ceramos/micropython,blmorris/micropython,stonegithubs/micropython,ganshun666/micropython,alex-march/microp
ython,hiway/micropython,xuxiaoxin/micropython,galenhz/micropython,firstval/micropython,ganshun666/micropython,warner83/micropython,tuc-osg/micropython,SHA2017-badge/micropython-esp32,adamkh/micropython,tobbad/micropython,mhoffma/micropython,torwag/micropython,hosaka/micropython,blazewicz/micropython,ernesto-g/micropython,EcmaXp/micropython,micropython/micropython-esp32,trezor/micropython,ryannathans/micropython,rubencabrera/micropython,turbinenreiter/micropython,jmarcelino/pycom-micropython,EcmaXp/micropython,henriknelson/micropython,micropython/micropython-esp32,noahchense/micropython,pozetroninc/micropython,MrSurly/micropython-esp32,skybird6672/micropython,tralamazza/micropython,Vogtinator/micropython,henriknelson/micropython,ruffy91/micropython,dxxb/micropython,jimkmc/micropython,heisewangluo/micropython,alex-robbins/micropython,ChuckM/micropython,feilongfl/micropython,slzatz/micropython,ahotam/micropython,ahotam/micropython,vriera/micropython,ahotam/micropython,puuu/micropython,oopy/micropython,swegener/micropython,neilh10/micropython,infinnovation/micropython,omtinez/micropython,kostyll/micropython,Peetz0r/micropython-esp32,utopiaprince/micropython,feilongfl/micropython,suda/micropython,aethaniel/micropython,ericsnowcurrently/micropython | ---
+++
@@ -20,3 +20,8 @@
print(x[1:])
print(x[:-1])
print(x[2:3])
+
+try:
+ print(x[1.0])
+except TypeError:
+ print("TypeError") |
ec7bbe8ac8715ea22142680f0d880a7d0b71c687 | paws/request.py | paws/request.py | from urlparse import parse_qs
from utils import cached_property, MultiDict
class Request(object):
def __init__(self, event, context):
self.event = event
self.context = context
@property
def method(self):
return self.event['httpMethod']
@property
def query(self):
return self.event['queryStringParameters']
@cached_property
def post(self):
return MultiDict(parse_qs(self.event.get('body', '') or ''))
@property
def stage(self):
return self.event['stage']
@property
def stageVar(self):
return self.event['stageVariables']
@property
def params(self):
return self.event['pathParameters']
| from Cookie import SimpleCookie
from urlparse import parse_qs
from utils import MultiDict, cached_property
class Request(object):
def __init__(self, event, context):
self.event = event
self.context = context
@property
def method(self):
return self.event['httpMethod']
@property
def query(self):
return self.event['queryStringParameters']
@cached_property
def post(self):
return MultiDict(parse_qs(self.event.get('body', '') or ''))
@cached_property
def cookies(self):
jar = SimpleCookie()
if self.event['headers'].get('Cookie'):
jar.load(self.event['headers']['Cookie'].encode('utf-8'))
return jar
@property
def stage(self):
return self.event['stage']
@property
def stageVar(self):
return self.event['stageVariables']
@property
def params(self):
return self.event['pathParameters']
| Add cookies property to Request | Add cookies property to Request
| Python | bsd-3-clause | funkybob/paws | ---
+++
@@ -1,6 +1,7 @@
+from Cookie import SimpleCookie
from urlparse import parse_qs
-from utils import cached_property, MultiDict
+from utils import MultiDict, cached_property
class Request(object):
@@ -20,6 +21,13 @@
def post(self):
return MultiDict(parse_qs(self.event.get('body', '') or ''))
+ @cached_property
+ def cookies(self):
+ jar = SimpleCookie()
+ if self.event['headers'].get('Cookie'):
+ jar.load(self.event['headers']['Cookie'].encode('utf-8'))
+ return jar
+
@property
def stage(self):
return self.event['stage'] |
423d40b8162ff2763346ddf0a115760c6efb4222 | tests/test_classes.py | tests/test_classes.py | from thinglang import run
def test_class_integration():
assert run("""
thing Person
has text name
has number age
created with name
self.name = name
self.age = 0
does grow_up
self.age = self.age + 1
does say_hello with excitement_level
Output.write("Hello from", self.name, ", who's ", self.age, "and is always up for a fun game of tag.")
thing Program
setup
text name = "yotam"
text wants_to_grow_up = true
#text name = Input.get_line("What is your name?")
#text wants_to_grow_up = Input.get_line("Do you want to grow up?")
Person person = create Person(name)
if wants_to_grow_up
person.grow_up()
person.say_hello()
""").output == """dog is dog"""
| from unittest.mock import patch
import io
from thinglang import run
def test_class_integration():
with patch('sys.stdin', io.StringIO('yotam\n19\n5')):
assert run("""
thing Person
has text name
has number age
setup with name
self.name = name
does say_hello with repeat_count
number i = 0
repeat while i < repeat_count
Output.write("Hello number", i, "from", self.name, "who's", self.age, "years old and is always excited to get some coding done.")
i = i + 1
thing Program
setup
Person person = create Person(Input.get_line("What is your name?"))
number age = Input.get_line("What is your age?") as number
if age
person.age = age
person.say_hello(Input.get_line("How excited are you?") as number)
""").output == "What is your name?\nWhat is your age?\nHow excited are you?\n" +\
"\n".join("Hello number {} from yotam who's 19 years old and is always excited to get some coding done.".format(i) for i in range(5))
| Update class integration test to use new syntax and output assertion | Update class integration test to use new syntax and output assertion
| Python | mit | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | ---
+++
@@ -1,34 +1,36 @@
+from unittest.mock import patch
+
+import io
+
from thinglang import run
def test_class_integration():
- assert run("""
+ with patch('sys.stdin', io.StringIO('yotam\n19\n5')):
+ assert run("""
thing Person
has text name
has number age
- created with name
+ setup with name
self.name = name
- self.age = 0
- does grow_up
- self.age = self.age + 1
-
- does say_hello with excitement_level
- Output.write("Hello from", self.name, ", who's ", self.age, "and is always up for a fun game of tag.")
+ does say_hello with repeat_count
+ number i = 0
+ repeat while i < repeat_count
+ Output.write("Hello number", i, "from", self.name, "who's", self.age, "years old and is always excited to get some coding done.")
+ i = i + 1
thing Program
setup
- text name = "yotam"
- text wants_to_grow_up = true
- #text name = Input.get_line("What is your name?")
- #text wants_to_grow_up = Input.get_line("Do you want to grow up?")
- Person person = create Person(name)
+ Person person = create Person(Input.get_line("What is your name?"))
+ number age = Input.get_line("What is your age?") as number
- if wants_to_grow_up
- person.grow_up()
+ if age
+ person.age = age
- person.say_hello()
- """).output == """dog is dog"""
+ person.say_hello(Input.get_line("How excited are you?") as number)
+ """).output == "What is your name?\nWhat is your age?\nHow excited are you?\n" +\
+ "\n".join("Hello number {} from yotam who's 19 years old and is always excited to get some coding done.".format(i) for i in range(5))
|
d825a94007c9ef2d464528572ea99aa2636ea4fa | tests/test_convert.py | tests/test_convert.py | import pytest # type: ignore
from ppb_vector import Vector2
from utils import vector_likes
@pytest.mark.parametrize('vector_like', vector_likes(), ids=lambda x: type(x).__name__) # type: ignore
def test_convert_subclass(vector_like):
class V(Vector2): pass
# test_binop_vectorlike already checks the output value is correct
assert isinstance(V.convert(vector_like), V)
| import pytest # type: ignore
from ppb_vector import Vector2
from utils import vector_likes
@pytest.mark.parametrize('vector_like', vector_likes(), ids=lambda x: type(x).__name__) # type: ignore
def test_convert_subclass(vector_like):
class V(Vector2): pass
vector = V.convert(vector_like)
assert isinstance(vector, V)
assert vector == vector_like
| Test converted vector equals original vector-like | tests/convert: Test converted vector equals original vector-like
The comment mentioned that property was tested by `test_binop_vectorlike`, but
Vector2.convert isn't a binary operator, so it isn't exercised by that test.
| Python | artistic-2.0 | ppb/ppb-vector,ppb/ppb-vector | ---
+++
@@ -7,5 +7,6 @@
def test_convert_subclass(vector_like):
class V(Vector2): pass
- # test_binop_vectorlike already checks the output value is correct
- assert isinstance(V.convert(vector_like), V)
+ vector = V.convert(vector_like)
+ assert isinstance(vector, V)
+ assert vector == vector_like |
f4bc89564828011d2306e16cdc84e053beb7c1d6 | tests/test_logging.py | tests/test_logging.py | from .fixtures import elasticsearch
import pytest
image_flavor = pytest.config.getoption('--image-flavor')
def test_elasticsearch_logs_are_in_docker_logs(elasticsearch):
elasticsearch.assert_in_docker_log('o.e.n.Node')
# eg. elasticsearch1 | [2017-07-04T00:54:22,604][INFO ][o.e.n.Node ] [docker-test-node-1] initializing ...
@pytest.mark.skipif(image_flavor != 'platinum',
reason="x-pack.security not installed in the -{} image.".format(image_flavor))
def test_security_audit_logs_are_in_docker_logs(elasticsearch):
elasticsearch.assert_in_docker_log('x.s.a.l.LoggingAuditTrail')
# eg. elasticsearch1 | [2017-07-04T01:10:19,189][INFO ][o.e.x.s.a.l.LoggingAuditTrail] [transport] [access_granted]
def test_info_level_logs_are_in_docker_logs(elasticsearch):
elasticsearch.assert_in_docker_log('INFO')
def test_no_errors_are_in_docker_logs(elasticsearch):
elasticsearch.assert_not_in_docker_log('ERROR')
| from .fixtures import elasticsearch
import pytest
image_flavor = pytest.config.getoption('--image-flavor')
def test_elasticsearch_logs_are_in_docker_logs(elasticsearch):
elasticsearch.assert_in_docker_log('o.e.n.Node')
# eg. elasticsearch1 | [2017-07-04T00:54:22,604][INFO ][o.e.n.Node ] [docker-test-node-1] initializing ...
@pytest.mark.skipif(image_flavor != 'platinum',
reason="x-pack.security not installed in the -{} image.".format(image_flavor))
def test_security_audit_logs_are_in_docker_logs(elasticsearch):
elasticsearch.assert_in_docker_log('x.s.a.l.LoggingAuditTrail')
# eg. elasticsearch1 | [2017-07-04T01:10:19,189][INFO ][o.e.x.s.a.l.LoggingAuditTrail] [transport] [access_granted]
def test_info_level_logs_are_in_docker_logs(elasticsearch):
elasticsearch.assert_in_docker_log('INFO')
| Remove test that sporadically gives false negatives | Remove test that sporadically gives false negatives
Nothing worse than an unreliable test; remove this test as there can be
errors in the logs that do not necessarily correspond to a broken
image.
Relates #119 | Python | apache-2.0 | jarpy/elasticsearch-docker,jarpy/elasticsearch-docker | ---
+++
@@ -18,7 +18,3 @@
def test_info_level_logs_are_in_docker_logs(elasticsearch):
elasticsearch.assert_in_docker_log('INFO')
-
-
-def test_no_errors_are_in_docker_logs(elasticsearch):
- elasticsearch.assert_not_in_docker_log('ERROR') |
f0157d9523f09d6a1392685f7b13cfac9f6bf6c0 | dad/worker/__init__.py | dad/worker/__init__.py | import click
from flask import Flask, current_app
app = Flask(__name__)
app.config.from_object('dad.worker.settings')
import dad.worker.server # noqa
@click.command()
@click.option('--port', '-p', default=6010)
def run(port):
with app.app_context():
dad.worker.server.register(current_app)
app.run(port=port)
| import os
import yaml
import click
from flask import Flask, current_app
app = Flask(__name__)
app.config.from_object('dad.worker.settings')
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
import dad.worker.server # noqa
@click.command()
@click.option('--port', '-p', default=6010)
def run(port):
with app.app_context():
dad.worker.server.register(current_app)
app.run(port=port)
| Allow overriding the config with app settings yaml in the worker process. | Allow overriding the config with app settings yaml in the worker process.
| Python | bsd-3-clause | ionrock/dadd,ionrock/dadd,ionrock/dadd,ionrock/dadd | ---
+++
@@ -1,8 +1,16 @@
+import os
+
+import yaml
import click
from flask import Flask, current_app
app = Flask(__name__)
app.config.from_object('dad.worker.settings')
+
+if os.environ.get('APP_SETTINGS_YAML'):
+ config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
+ app.config.update(config)
+
import dad.worker.server # noqa
|
aaeabe5d6a987a04b6e78853eb1dfd7d4b85e505 | examples/demo2.py | examples/demo2.py | import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import logging
import threading
import xmpp2
from xmpp2 import XML
USERNAME = 'yourusername'
PASSWORD = 'yourpassword'
SERVER = 'example.com'
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('xmpp2.xml.handler').setLevel(logging.INFO)
c = xmpp2.Client(SERVER, stream_log_level=xmpp2.LOG_STREAM)
c.connect()
c.auth(USERNAME, password=PASSWORD)
c.write(XML.presence.add(XML.priority.add(1)))
for n in c.gen:
sys.stdout.write(str(n))
| import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import logging
import threading
import xmpp2
from xmpp2 import XML
USERNAME = 'yourusername'
PASSWORD = 'yourpassword'
SERVER = 'example.com'
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('xmpp2.xml.handler').setLevel(logging.INFO)
c = xmpp2.Client(SERVER, stream_log_level=xmpp2.LOG_STREAM)
c.connect()
c.auth(USERNAME, password=PASSWORD)
c.write(XML.presence.add(XML.priority.add(1)))
for n in c.gen:
sys.stdout.write(str(n) + '\n')
| Add '\n' so it would flush. | Add '\n' so it would flush.
| Python | mit | easies/xmpp2 | ---
+++
@@ -19,4 +19,4 @@
c.write(XML.presence.add(XML.priority.add(1)))
for n in c.gen:
- sys.stdout.write(str(n))
+ sys.stdout.write(str(n) + '\n') |
26b1b92d23eddd3496aa77a97d93bc7745047f24 | dashboard/run_tests.py | dashboard/run_tests.py | #!/usr/bin/python
"""Runs the unit test suite for perf dashboard."""
import argparse
import dev_appserver
import os
import sys
import unittest
_DASHBOARD_PARENT = os.path.join(os.path.dirname(__file__))
_DASHBOARD = os.path.join(_DASHBOARD_PARENT, 'dashboard')
def _GetTests(args):
loader = unittest.TestLoader()
if args.tests:
return loader.loadTestsFromNames(args.tests)
return loader.discover(_DASHBOARD, pattern='*_test.py')
def _FixPath():
dev_appserver.fix_sys_path()
sys.path.append(os.path.dirname(__file__))
# The __init__.py in the dashboard package should add third party
# libraries to the path.
import dashboard # pylint: disable=unused-variable
def main():
_FixPath()
parser = argparse.ArgumentParser(description='Run the test suite.')
parser.add_argument(
'tests', nargs='*', help='Fully-qualified names of tests to run.')
args = parser.parse_args()
runner = unittest.TextTestRunner(verbosity=1)
result = runner.run(_GetTests(args))
if result.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
| #!/usr/bin/python
"""Runs the unit test suite for perf dashboard."""
import argparse
import dev_appserver
import logging
import os
import sys
import unittest
_DASHBOARD_PARENT = os.path.join(os.path.dirname(__file__))
_DASHBOARD = os.path.join(_DASHBOARD_PARENT, 'dashboard')
def _GetTests(args):
loader = unittest.TestLoader()
if args.tests:
return loader.loadTestsFromNames(args.tests)
return loader.discover(_DASHBOARD, pattern='*_test.py')
def _FixPath():
dev_appserver.fix_sys_path()
sys.path.append(os.path.dirname(__file__))
# The __init__.py in the dashboard package should add third party
# libraries to the path.
import dashboard # pylint: disable=unused-variable
def main():
_FixPath()
parser = argparse.ArgumentParser(description='Run the test suite.')
parser.add_argument(
'tests', nargs='*', help='Fully-qualified names of tests to run.')
args = parser.parse_args()
runner = unittest.TextTestRunner(verbosity=1)
logging.basicConfig(level=logging.CRITICAL)
result = runner.run(_GetTests(args))
if result.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
| Stop printing logs while running dashboard tests in catapult. | Stop printing logs while running dashboard tests in catapult.
Some of the dashboard tests invoke functions which log errors,
but these are not interesting or indicative of actual errors when
running the test.
By setting the logging level in run_tests.py, this makes it so
no log messages are printed when run_tests.py is run, so the
output is a bit cleaner.
Review URL: https://codereview.chromium.org/1180363002
| Python | bsd-3-clause | catapult-project/catapult,zeptonaut/catapult,catapult-project/catapult-csm,modulexcite/catapult,catapult-project/catapult-csm,danbeam/catapult,sahiljain/catapult,dstockwell/catapult,SummerLW/Perf-Insight-Report,SummerLW/Perf-Insight-Report,sahiljain/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,catapult-project/catapult-csm,0x90sled/catapult,catapult-project/catapult,dstockwell/catapult,scottmcmaster/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,benschmaus/catapult,sahiljain/catapult,0x90sled/catapult,benschmaus/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,sahiljain/catapult,modulexcite/catapult,benschmaus/catapult,catapult-project/catapult-csm,zeptonaut/catapult,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult,sahiljain/catapult,zeptonaut/catapult,catapult-project/catapult,modulexcite/catapult,0x90sled/catapult,benschmaus/catapult,scottmcmaster/catapult,dstockwell/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,danbeam/catapult,danbeam/catapult,catapult-project/catapult,scottmcmaster/catapult,dstockwell/catapult,catapult-project/catapult-csm,danbeam/catapult | ---
+++
@@ -4,6 +4,7 @@
import argparse
import dev_appserver
+import logging
import os
import sys
import unittest
@@ -34,6 +35,7 @@
'tests', nargs='*', help='Fully-qualified names of tests to run.')
args = parser.parse_args()
runner = unittest.TextTestRunner(verbosity=1)
+ logging.basicConfig(level=logging.CRITICAL)
result = runner.run(_GetTests(args))
if result.wasSuccessful():
sys.exit(0) |
bd30e0596a6310962ea0cf2e83a3ccd6eb9cf4ff | dimod/package_info.py | dimod/package_info.py | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.18'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.8.19'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| Update version 0.8.18 -> 0.8.19 | Update version 0.8.18 -> 0.8.19
Fixes
------
* Fix osx wheels for 10.9 binary compatibility
* `ConnectedComponentComposites` now correctly fixes variables according to vartype | Python | apache-2.0 | dwavesystems/dimod,dwavesystems/dimod | ---
+++
@@ -14,7 +14,7 @@
#
# ================================================================================================
-__version__ = '0.8.18'
+__version__ = '0.8.19'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.' |
da85cfae848df4cee5ccf2cbbbc370848ea172a7 | src/txkube/_memory.py | src/txkube/_memory.py | # Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
An in-memory implementation of the Kubernetes client interface.
"""
from zope.interface import implementer
from twisted.python.url import URL
from twisted.web.resource import Resource
from treq.testing import RequestTraversalAgent
from . import IKubernetes, network_kubernetes
def memory_kubernetes():
"""
Create an in-memory Kubernetes-alike service.
This serves as a places to hold state for stateful Kubernetes interactions
allowed by ``IKubernetesClient``. Only clients created against the same
instance will all share state.
:return IKubernetes: The new Kubernetes-alike service.
"""
return _MemoryKubernetes()
@implementer(IKubernetes)
class _MemoryKubernetes(object):
"""
``_MemoryKubernetes`` maintains state in-memory which approximates
the state of a real Kubernetes deployment sufficiently to expose a
subset of the external Kubernetes API.
"""
def __init__(self):
base_url = URL.fromText(u"https://localhost/")
self._resource = _kubernetes_resource()
self._kubernetes = network_kubernetes(
base_url=base_url,
credentials=None,
agent=RequestTraversalAgent(self._resource),
)
def client(self, *args, **kwargs):
"""
:return IKubernetesClient: A new client which interacts with this
object rather than a real Kubernetes deployment.
"""
return self._kubernetes.client(*args, **kwargs)
def _kubernetes_resource():
return Resource()
| # Copyright Least Authority Enterprises.
# See LICENSE for details.
"""
An in-memory implementation of the Kubernetes client interface.
"""
from zope.interface import implementer
from twisted.python.url import URL
from twisted.web.resource import Resource
from treq.testing import RequestTraversalAgent
from . import IKubernetes, network_kubernetes
def memory_kubernetes():
"""
Create an in-memory Kubernetes-alike service.
This serves as a places to hold state for stateful Kubernetes interactions
allowed by ``IKubernetesClient``. Only clients created against the same
instance will all share state.
:return IKubernetes: The new Kubernetes-alike service.
"""
return _MemoryKubernetes()
@implementer(IKubernetes)
class _MemoryKubernetes(object):
"""
``_MemoryKubernetes`` maintains state in-memory which approximates
the state of a real Kubernetes deployment sufficiently to expose a
subset of the external Kubernetes API.
"""
def __init__(self):
base_url = URL.fromText(u"https://kubernetes.example.invalid./")
self._resource = _kubernetes_resource()
self._kubernetes = network_kubernetes(
base_url=base_url,
credentials=None,
agent=RequestTraversalAgent(self._resource),
)
def client(self, *args, **kwargs):
"""
:return IKubernetesClient: A new client which interacts with this
object rather than a real Kubernetes deployment.
"""
return self._kubernetes.client(*args, **kwargs)
def _kubernetes_resource():
return Resource()
| Use a non-routeable address for this URL. | Use a non-routeable address for this URL.
We do not anticipate ever sending any traffic to this since this is the
in-memory-only implementation.
| Python | mit | LeastAuthority/txkube | ---
+++
@@ -37,7 +37,7 @@
subset of the external Kubernetes API.
"""
def __init__(self):
- base_url = URL.fromText(u"https://localhost/")
+ base_url = URL.fromText(u"https://kubernetes.example.invalid./")
self._resource = _kubernetes_resource()
self._kubernetes = network_kubernetes(
base_url=base_url, |
135d8da492c4ee763727c2df75b20274c7e17cf0 | qiprofile_rest/model/__init__.py | qiprofile_rest/model/__init__.py | """
The qiprofile Mongodb data model.
The model field choices are listed in the preferred display order,
most common to least common.
The data capture client has the following responsibility:
* Validate the data upon input as determined by the model
validation below.
* Resolve conflicts between data capture and the model, e.g. the
default value or validation.
:Note: mongoengine 0.8.7 has the following bug:
* Each mongoengine non-embedded class embedded field must specify
a class by reference rather than name, e.g.::
class SessionDetail(mongoengine.Document):
volumes = fields.ListField(field=mongoengine.EmbeddedDocumentField(Volume))
rather than::
class SessionDetail(mongoengine.Document):
volumes = fields.ListField(field=mongoengine.EmbeddedDocumentField('Volume'))
If the class is referenced by name, then the model is initialized, but
an attempt to save an object results in the following validation error::
Invalid embedded document instance provided
"""
| """
The qiprofile Mongodb data model.
The model field choices are listed in the preferred display order,
most common to least common.
The data capture client has the following responsibility:
* Validate the data upon input as determined by the model
validation documentation.
* Resolve conflicts between data capture and the model, e.g. the
default value or validation.
"""
# Note: mongoengine 0.8.7 has the following bug:
# * Each mongoengine non-embedded class embedded field must specify
# a class by reference rather than name, e.g.::
#
# class SessionDetail(mongoengine.Document):
# volumes = fields.ListField(field=mongoengine.EmbeddedDocumentField(Volume))
#
# rather than::
#
# class SessionDetail(mongoengine.Document):
# volumes = fields.ListField(field=mongoengine.EmbeddedDocumentField('Volume'))
#
# If the class is referenced by name, then the model is initialized, but
# an attempt to save an object results in the following validation error::
#
# Invalid embedded document instance provided
| Make implementation node a comment rather than public documentation. | Make implementation node a comment rather than public documentation.
| Python | bsd-2-clause | ohsu-qin/qirest,ohsu-qin/qiprofile-rest | ---
+++
@@ -6,26 +6,26 @@
The data capture client has the following responsibility:
-* Validate the data upon input as determined by the model
- validation below.
+* Validate the data upon input as determined by the model
+ validation documentation.
* Resolve conflicts between data capture and the model, e.g. the
- default value or validation.
+ default value or validation.
+"""
-:Note: mongoengine 0.8.7 has the following bug:
- * Each mongoengine non-embedded class embedded field must specify
- a class by reference rather than name, e.g.::
-
- class SessionDetail(mongoengine.Document):
- volumes = fields.ListField(field=mongoengine.EmbeddedDocumentField(Volume))
-
- rather than::
-
- class SessionDetail(mongoengine.Document):
- volumes = fields.ListField(field=mongoengine.EmbeddedDocumentField('Volume'))
-
- If the class is referenced by name, then the model is initialized, but
- an attempt to save an object results in the following validation error::
-
- Invalid embedded document instance provided
-"""
+# Note: mongoengine 0.8.7 has the following bug:
+# * Each mongoengine non-embedded class embedded field must specify
+# a class by reference rather than name, e.g.::
+#
+# class SessionDetail(mongoengine.Document):
+# volumes = fields.ListField(field=mongoengine.EmbeddedDocumentField(Volume))
+#
+# rather than::
+#
+# class SessionDetail(mongoengine.Document):
+# volumes = fields.ListField(field=mongoengine.EmbeddedDocumentField('Volume'))
+#
+# If the class is referenced by name, then the model is initialized, but
+# an attempt to save an object results in the following validation error::
+#
+# Invalid embedded document instance provided |
888fd7120f2a3a1cd5ec73da16705c27cda049fc | readthedocs/settings/postgres.py | readthedocs/settings/postgres.py | from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': 'golem',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
try:
from local_settings import *
except:
pass
| from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'docs',
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '',
'HOST': 'golem',
'PORT': '',
}
}
DEBUG = False
TEMPLATE_DEBUG = False
CELERY_ALWAYS_EAGER = False
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
#SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr'
try:
from local_settings import *
except:
pass
| Kill cache sessions for now. | Kill cache sessions for now. | Python | mit | asampat3090/readthedocs.org,KamranMackey/readthedocs.org,sils1297/readthedocs.org,kdkeyser/readthedocs.org,LukasBoersma/readthedocs.org,mrshoki/readthedocs.org,Carreau/readthedocs.org,fujita-shintaro/readthedocs.org,sid-kap/readthedocs.org,sunnyzwh/readthedocs.org,kenshinthebattosai/readthedocs.org,sunnyzwh/readthedocs.org,davidfischer/readthedocs.org,CedarLogic/readthedocs.org,raven47git/readthedocs.org,ojii/readthedocs.org,safwanrahman/readthedocs.org,safwanrahman/readthedocs.org,Carreau/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,GovReady/readthedocs.org,titiushko/readthedocs.org,VishvajitP/readthedocs.org,sunnyzwh/readthedocs.org,soulshake/readthedocs.org,espdev/readthedocs.org,johncosta/private-readthedocs.org,nyergler/pythonslides,SteveViss/readthedocs.org,fujita-shintaro/readthedocs.org,techtonik/readthedocs.org,nikolas/readthedocs.org,dirn/readthedocs.org,LukasBoersma/readthedocs.org,CedarLogic/readthedocs.org,agjohnson/readthedocs.org,takluyver/readthedocs.org,KamranMackey/readthedocs.org,kenwang76/readthedocs.org,GovReady/readthedocs.org,clarkperkins/readthedocs.org,sid-kap/readthedocs.org,titiushko/readthedocs.org,sils1297/readthedocs.org,agjohnson/readthedocs.org,mrshoki/readthedocs.org,alex/readthedocs.org,kenwang76/readthedocs.org,sid-kap/readthedocs.org,tddv/readthedocs.org,wijerasa/readthedocs.org,raven47git/readthedocs.org,attakei/readthedocs-oauth,LukasBoersma/readthedocs.org,d0ugal/readthedocs.org,stevepiercy/readthedocs.org,emawind84/readthedocs.org,titiushko/readthedocs.org,alex/readthedocs.org,CedarLogic/readthedocs.org,soulshake/readthedocs.org,cgourlay/readthedocs.org,SteveViss/readthedocs.org,sils1297/readthedocs.org,wanghaven/readthedocs.org,CedarLogic/readthedocs.org,emawind84/readthedocs.org,attakei/readthedocs-oauth,safwanrahman/readthedocs.org,asampat3090/readthedocs.org,gjtorikian/readthedocs.org,michaelmcandrew/readthedocs.org,royalw
ang/readthedocs.org,michaelmcandrew/readthedocs.org,pombredanne/readthedocs.org,cgourlay/readthedocs.org,Tazer/readthedocs.org,VishvajitP/readthedocs.org,raven47git/readthedocs.org,dirn/readthedocs.org,Carreau/readthedocs.org,mrshoki/readthedocs.org,gjtorikian/readthedocs.org,sid-kap/readthedocs.org,kdkeyser/readthedocs.org,ojii/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,d0ugal/readthedocs.org,sunnyzwh/readthedocs.org,espdev/readthedocs.org,asampat3090/readthedocs.org,fujita-shintaro/readthedocs.org,laplaceliu/readthedocs.org,rtfd/readthedocs.org,atsuyim/readthedocs.org,michaelmcandrew/readthedocs.org,davidfischer/readthedocs.org,kdkeyser/readthedocs.org,kenwang76/readthedocs.org,agjohnson/readthedocs.org,gjtorikian/readthedocs.org,kenshinthebattosai/readthedocs.org,hach-que/readthedocs.org,fujita-shintaro/readthedocs.org,GovReady/readthedocs.org,d0ugal/readthedocs.org,soulshake/readthedocs.org,rtfd/readthedocs.org,istresearch/readthedocs.org,wanghaven/readthedocs.org,Carreau/readthedocs.org,singingwolfboy/readthedocs.org,dirn/readthedocs.org,attakei/readthedocs-oauth,SteveViss/readthedocs.org,Tazer/readthedocs.org,nyergler/pythonslides,wijerasa/readthedocs.org,clarkperkins/readthedocs.org,davidfischer/readthedocs.org,istresearch/readthedocs.org,takluyver/readthedocs.org,sils1297/readthedocs.org,kenshinthebattosai/readthedocs.org,alex/readthedocs.org,clarkperkins/readthedocs.org,agjohnson/readthedocs.org,takluyver/readthedocs.org,johncosta/private-readthedocs.org,singingwolfboy/readthedocs.org,GovReady/readthedocs.org,attakei/readthedocs-oauth,stevepiercy/readthedocs.org,techtonik/readthedocs.org,laplaceliu/readthedocs.org,Tazer/readthedocs.org,clarkperkins/readthedocs.org,nyergler/pythonslides,michaelmcandrew/readthedocs.org,espdev/readthedocs.org,wijerasa/readthedocs.org,soulshake/readthedocs.org,jerel/readthedocs.org,tddv/readthedocs.org,pombredanne/readthedocs.org,emawind84/readthedocs.org,techtonik/readthedocs.org,SteveViss/readthedoc
s.org,jerel/readthedocs.org,rtfd/readthedocs.org,atsuyim/readthedocs.org,safwanrahman/readthedocs.org,royalwang/readthedocs.org,kdkeyser/readthedocs.org,mhils/readthedocs.org,LukasBoersma/readthedocs.org,Tazer/readthedocs.org,atsuyim/readthedocs.org,wanghaven/readthedocs.org,mhils/readthedocs.org,royalwang/readthedocs.org,hach-que/readthedocs.org,gjtorikian/readthedocs.org,istresearch/readthedocs.org,d0ugal/readthedocs.org,nikolas/readthedocs.org,kenshinthebattosai/readthedocs.org,VishvajitP/readthedocs.org,kenwang76/readthedocs.org,nikolas/readthedocs.org,dirn/readthedocs.org,nikolas/readthedocs.org,stevepiercy/readthedocs.org,emawind84/readthedocs.org,ojii/readthedocs.org,espdev/readthedocs.org,tddv/readthedocs.org,johncosta/private-readthedocs.org,cgourlay/readthedocs.org,davidfischer/readthedocs.org,royalwang/readthedocs.org,atsuyim/readthedocs.org,hach-que/readthedocs.org,cgourlay/readthedocs.org,VishvajitP/readthedocs.org,laplaceliu/readthedocs.org,nyergler/pythonslides,stevepiercy/readthedocs.org,KamranMackey/readthedocs.org,alex/readthedocs.org,takluyver/readthedocs.org,techtonik/readthedocs.org,mhils/readthedocs.org,pombredanne/readthedocs.org,hach-que/readthedocs.org,singingwolfboy/readthedocs.org,asampat3090/readthedocs.org,laplaceliu/readthedocs.org,mhils/readthedocs.org,KamranMackey/readthedocs.org,jerel/readthedocs.org,ojii/readthedocs.org,jerel/readthedocs.org,mrshoki/readthedocs.org,espdev/readthedocs.org,raven47git/readthedocs.org,wanghaven/readthedocs.org,wijerasa/readthedocs.org | ---
+++
@@ -18,7 +18,7 @@
MEDIA_URL = 'http://media.readthedocs.org/'
ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
CACHE_BACKEND = 'memcached://localhost:11211/'
-SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
+#SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
HAYSTACK_SEARCH_ENGINE = 'solr'
HAYSTACK_SOLR_URL = 'http://odin:8983/solr' |
6d2172a72ca3fc5de628c49bce9c0cbf191a9a61 | submitter/__main__.py | submitter/__main__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import logging
import sys
from datetime import datetime
from .config import Config
from .submit import submit
c = Config(os.environ)
if c.verbose:
level=logging.DEBUG
else:
level=logging.INFO
logging.basicConfig(format='%(message)s', level=level)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
if not c.is_valid():
logging.error('Invalid configuration. Missing the following environment variables:')
for var in c.missing():
logging.error(" " + var)
sys.exit(1)
start = datetime.utcnow()
result = submit(c)
finish = datetime.utcnow()
pattern = 'Submitted {asset_uploaded} / {asset_total} assets and ' \
'{envelope_uploaded} / {envelope_total} envelopes in {duration}.'
summary = pattern.format(
asset_uploaded=result.asset_result.uploaded,
asset_total=len(result.asset_result.asset_set),
envelope_uploaded=result.envelope_result.uploaded,
envelope_total=len(result.envelope_result.envelope_set),
duration=finish - start
)
logging.info(summary)
sys.exit(1)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import logging
import sys
from datetime import datetime
from .config import Config
from .submit import submit, SUCCESS, NOOP
c = Config(os.environ)
if c.verbose:
level=logging.DEBUG
else:
level=logging.INFO
logging.basicConfig(format='%(message)s', level=level)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
if not c.is_valid():
logging.error('Invalid configuration. Missing the following environment variables:')
for var in c.missing():
logging.error(" " + var)
sys.exit(1)
start = datetime.utcnow()
result = submit(c)
finish = datetime.utcnow()
pattern = 'Submitted {asset_uploaded} / {asset_total} assets and ' \
'{envelope_uploaded} / {envelope_total} envelopes in {duration}.'
summary = pattern.format(
asset_uploaded=result.asset_result.uploaded,
asset_total=len(result.asset_result.asset_set),
envelope_uploaded=result.envelope_result.uploaded,
envelope_total=len(result.envelope_result.envelope_set),
duration=finish - start
)
logging.info(summary)
if result.state is SUCCESS:
sys.exit(0)
elif result.state is NOOP:
# Signal to the Strider plugin that we did nothing.
sys.exit(2)
else:
# FAILURE
logging.error('Failed to upload {} envelopes.'.format(result.envelope_result.failed))
sys.exit(1)
| Exit with an appropriate status. | Exit with an appropriate status.
| Python | apache-2.0 | deconst/submitter,deconst/submitter | ---
+++
@@ -7,7 +7,7 @@
from datetime import datetime
from .config import Config
-from .submit import submit
+from .submit import submit, SUCCESS, NOOP
c = Config(os.environ)
@@ -40,4 +40,12 @@
)
logging.info(summary)
-sys.exit(1)
+if result.state is SUCCESS:
+ sys.exit(0)
+elif result.state is NOOP:
+ # Signal to the Strider plugin that we did nothing.
+ sys.exit(2)
+else:
+ # FAILURE
+ logging.error('Failed to upload {} envelopes.'.format(result.envelope_result.failed))
+ sys.exit(1) |
d0bd57cbdf8ba451e12f7e6b2574cd77132f0f3b | kboard/board/forms.py | kboard/board/forms.py | from django import forms
from django_summernote.widgets import SummernoteWidget
from django.core.exceptions import NON_FIELD_ERRORS
from .models import Post
class PostForm(forms.ModelForm):
class Meta:
model = Post
fields = ('title', 'content', 'file')
widgets = {
'title': forms.TextInput(attrs={'id': 'id_post_title', 'class': 'form-control', 'name': 'post_title_text', 'placeholder': 'Insert Title'}),
'content': SummernoteWidget(),
}
def __init__(self, *args, **kwargs):
super(PostForm, self).__init__(*args, **kwargs)
self.fields['file'].required = False
| from django import forms
from django.forms.utils import ErrorList
from django_summernote.widgets import SummernoteWidget
from .models import Post
EMPTY_TITLE_ERROR = "제목을 입력하세요"
class DivErrorList(ErrorList):
def __str__(self):
return self.as_divs()
def as_divs(self):
if not self:
return ''
return '<div class="form-group has-error">%s</div>' % ''.join(['<div class="help-block">%s</div>' % e for e in self])
class PostForm(forms.ModelForm):
class Meta:
model = Post
fields = ('title', 'content', 'file')
widgets = {
'title': forms.TextInput(attrs={'id': 'id_post_title', 'class': 'form-control', 'name': 'post_title_text', 'placeholder': 'Insert Title'}),
'content': SummernoteWidget(),
}
error_messages = {
'title': {'required': EMPTY_TITLE_ERROR}
}
def __init__(self, *args, **kwargs):
kwargs_new = {'error_class': DivErrorList}
kwargs_new.update(kwargs)
super(PostForm, self).__init__(*args, **kwargs_new)
self.fields['file'].required = False
| Customize the error list format in modelform | Customize the error list format in modelform
| Python | mit | cjh5414/kboard,darjeeling/k-board,cjh5414/kboard,hyesun03/k-board,guswnsxodlf/k-board,kboard/kboard,guswnsxodlf/k-board,kboard/kboard,hyesun03/k-board,cjh5414/kboard,guswnsxodlf/k-board,kboard/kboard,hyesun03/k-board | ---
+++
@@ -1,8 +1,20 @@
from django import forms
+from django.forms.utils import ErrorList
from django_summernote.widgets import SummernoteWidget
-from django.core.exceptions import NON_FIELD_ERRORS
from .models import Post
+
+EMPTY_TITLE_ERROR = "제목을 입력하세요"
+
+
+class DivErrorList(ErrorList):
+ def __str__(self):
+ return self.as_divs()
+
+ def as_divs(self):
+ if not self:
+ return ''
+ return '<div class="form-group has-error">%s</div>' % ''.join(['<div class="help-block">%s</div>' % e for e in self])
class PostForm(forms.ModelForm):
@@ -13,7 +25,12 @@
'title': forms.TextInput(attrs={'id': 'id_post_title', 'class': 'form-control', 'name': 'post_title_text', 'placeholder': 'Insert Title'}),
'content': SummernoteWidget(),
}
+ error_messages = {
+ 'title': {'required': EMPTY_TITLE_ERROR}
+ }
def __init__(self, *args, **kwargs):
- super(PostForm, self).__init__(*args, **kwargs)
+ kwargs_new = {'error_class': DivErrorList}
+ kwargs_new.update(kwargs)
+ super(PostForm, self).__init__(*args, **kwargs_new)
self.fields['file'].required = False |
2f29db7bd30b3db40f6577a4718623f6cd97a5cf | search/sorting.py | search/sorting.py |
def sort_by_property(prop):
def _sort_by_property(items):
return sorted(items,
key=lambda item: getattr(item, prop),
reverse=True)
return _sort_by_property
sort_by_popularity = sort_by_property('popularity')
|
def sort_by_property(prop, reverse=False):
def _sort_by_property(items):
return sorted(items,
key=lambda item: getattr(item, prop),
reverse=reverse)
return _sort_by_property
sort_by_popularity = sort_by_property('popularity', reverse=True)
| Make it possible to control reverse | Make it possible to control reverse | Python | mit | vanng822/geosearch,vanng822/geosearch,vanng822/geosearch | ---
+++
@@ -1,10 +1,10 @@
-def sort_by_property(prop):
+def sort_by_property(prop, reverse=False):
def _sort_by_property(items):
return sorted(items,
key=lambda item: getattr(item, prop),
- reverse=True)
+ reverse=reverse)
return _sort_by_property
-sort_by_popularity = sort_by_property('popularity')
+sort_by_popularity = sort_by_property('popularity', reverse=True) |
9797c47fea8f5f690ed3989142b0f7e508e13fa0 | pskb_website/cache.py | pskb_website/cache.py | """
Caching utilities
"""
import urlparse
from . import app
url = None
redis_obj = None
try:
import redis
except ImportError:
app.logger.warning('No caching available, missing redis module')
else:
try:
url = urlparse.urlparse(app.config['REDISCLOUD_URL'])
except KeyError:
app.logger.warning('No caching available, missing REDISCLOUD_URL env var')
else:
redis_obj = redis.Redis(host=url.hostname, port=url.port,
password=url.password)
def read_article(path, branch):
"""
Look for article pointed to by given path and branch in cache
:param path: Short path to article not including repo information
:param branch: Name of branch article belongs to
:returns: JSON representation of article or None if not found in cache
"""
if redis_obj is None:
return None
return redis_obj.get((path, branch))
def save_article(article):
"""
Save article JSON in cache
:param article: model.article.Article object
:returns: None
"""
if redis_obj is None:
return
redis_obj.set((article.path, article.branch), article.to_json())
| """
Caching utilities
"""
import functools
import urlparse
from . import app
url = None
redis_obj = None
try:
import redis
except ImportError:
app.logger.warning('No caching available, missing redis module')
else:
try:
url = urlparse.urlparse(app.config['REDISCLOUD_URL'])
except KeyError:
app.logger.warning('No caching available, missing REDISCLOUD_URL env var')
else:
redis_obj = redis.Redis(host=url.hostname, port=url.port,
password=url.password)
def verify_redis_instance(func):
"""
Decorator to verify redis instance exists and return None if missing redis
"""
@functools.wraps(func)
def _wrapper(*args, **kwargs):
if redis_obj is None:
return None
return func(*args, **kwargs)
return _wrapper
@verify_redis_instance
def read_article(path, branch):
"""
Look for article pointed to by given path and branch in cache
:param path: Short path to article not including repo information
:param branch: Name of branch article belongs to
:returns: JSON representation of article or None if not found in cache
"""
return redis_obj.get((path, branch))
@verify_redis_instance
def save_article(article):
"""
Save article JSON in cache
:param article: model.article.Article object
:returns: None
"""
redis_obj.set((article.path, article.branch), article.to_json())
| Add small decorator to check for valid redis instance | Add small decorator to check for valid redis instance
| Python | agpl-3.0 | paulocheque/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms,paulocheque/guides-cms,paulocheque/guides-cms | ---
+++
@@ -2,6 +2,7 @@
Caching utilities
"""
+import functools
import urlparse
from . import app
@@ -23,6 +24,22 @@
password=url.password)
+def verify_redis_instance(func):
+ """
+ Decorator to verify redis instance exists and return None if missing redis
+ """
+
+ @functools.wraps(func)
+ def _wrapper(*args, **kwargs):
+ if redis_obj is None:
+ return None
+
+ return func(*args, **kwargs)
+
+ return _wrapper
+
+
+@verify_redis_instance
def read_article(path, branch):
"""
Look for article pointed to by given path and branch in cache
@@ -32,12 +49,10 @@
:returns: JSON representation of article or None if not found in cache
"""
- if redis_obj is None:
- return None
-
return redis_obj.get((path, branch))
+@verify_redis_instance
def save_article(article):
"""
Save article JSON in cache
@@ -46,7 +61,4 @@
:returns: None
"""
- if redis_obj is None:
- return
-
redis_obj.set((article.path, article.branch), article.to_json()) |
fc8c7a62b737e4f291250c4d45bf34ae944ef6da | sweettooth/upload/urls.py | sweettooth/upload/urls.py |
from django.conf.urls.defaults import patterns, url
slug_charset = "[a-zA-Z0-9-_]"
urlpatterns = patterns('upload',
url(r'^$', 'views.upload_file', dict(slug=None), name='upload-file'),
url(r'(?P<slug>%s+)/$' % (slug_charset,), 'views.upload_file', name='upload-file'),
url(r'edit-data/$', 'views.upload_edit_data', name='upload-edit-data'),
)
|
from django.conf.urls.defaults import patterns, url
slug_charset = "[a-zA-Z0-9-_]"
urlpatterns = patterns('upload',
url(r'^$', 'views.upload_file', dict(slug=None), name='upload-file'),
url(r'new-version/(?P<slug>%s+)/$' % (slug_charset,), 'views.upload_file', name='upload-file'),
url(r'edit-data/$', 'views.upload_edit_data', name='upload-edit-data'),
)
| Adjust URL for new version upload, was competing with 'edit-data'. | Adjust URL for new version upload, was competing with 'edit-data'.
| Python | agpl-3.0 | magcius/sweettooth,GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,magcius/sweettooth,GNOME/extensions-web | ---
+++
@@ -5,6 +5,6 @@
urlpatterns = patterns('upload',
url(r'^$', 'views.upload_file', dict(slug=None), name='upload-file'),
- url(r'(?P<slug>%s+)/$' % (slug_charset,), 'views.upload_file', name='upload-file'),
+ url(r'new-version/(?P<slug>%s+)/$' % (slug_charset,), 'views.upload_file', name='upload-file'),
url(r'edit-data/$', 'views.upload_edit_data', name='upload-edit-data'),
) |
ada3e5dca1e777187a0933bd7b83fcb38c4faf66 | nightreads/user_manager/forms.py | nightreads/user_manager/forms.py | from django import forms
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.CharField()
def clean(self):
tags = self.cleaned_data['tags'].split(',')
self.cleaned_data['tags'] = [t.strip().lower() for t in tags]
return self.cleaned_data
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
| from django import forms
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.CharField()
def clean_tags(self):
tags = self.cleaned_data['tags'].split(',')
return [t.strip().lower() for t in tags]
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
| Use `clean_tags` instead of `clean` | Use `clean_tags` instead of `clean`
| Python | mit | avinassh/nightreads,avinassh/nightreads | ---
+++
@@ -5,10 +5,9 @@
email = forms.EmailField()
tags = forms.CharField()
- def clean(self):
+ def clean_tags(self):
tags = self.cleaned_data['tags'].split(',')
- self.cleaned_data['tags'] = [t.strip().lower() for t in tags]
- return self.cleaned_data
+ return [t.strip().lower() for t in tags]
class UnsubscribeForm(forms.Form): |
d4de2b2ff0cb7ba6ad4eed7fe2c0b70801e3c057 | juriscraper/opinions/united_states/state/massappct128.py | juriscraper/opinions/united_states/state/massappct128.py | """
Scraper for Massachusetts Appeals Court
CourtID: massapp
Court Short Name: MS
Author: William Palin
Court Contact: SJCReporter@sjc.state.ma.us (617) 557-1030
Reviewer:
Date: 2020-01-17
"""
from juriscraper.opinions.united_states.state import mass
class Site(mass.Site):
def __init__(self, *args, **kwargs):
super(Site, self).__init__(*args, **kwargs)
self.url = "https://www.mass.gov/service-details/new-opinions"
self.court_id = self.__module__
self.court_identifier = "AC"
self.set_local_variables()
| """
Scraper for Massachusetts Appeals Court
CourtID: massapp
Court Short Name: MS
Author: William Palin
Court Contact: SJCReporter@sjc.state.ma.us (617) 557-1030
Reviewer:
Date: 2020-01-17
"""
from juriscraper.opinions.united_states.state import mass
class Site(mass.Site):
def __init__(self, *args, **kwargs):
super(Site, self).__init__(*args, **kwargs)
self.url = "https://128archive.com"
self.court_id = self.__module__
self.court_identifier = "AC"
self.set_local_variables()
| Change url to 128archive.com website. | fix(mass128): Change url to 128archive.com website.
| Python | bsd-2-clause | freelawproject/juriscraper,freelawproject/juriscraper | ---
+++
@@ -14,7 +14,7 @@
class Site(mass.Site):
def __init__(self, *args, **kwargs):
super(Site, self).__init__(*args, **kwargs)
- self.url = "https://www.mass.gov/service-details/new-opinions"
+ self.url = "https://128archive.com"
self.court_id = self.__module__
self.court_identifier = "AC"
self.set_local_variables() |
3f4ef89512da6a3e89f27121446ec59773869017 | setuptools/tests/test_setuptools.py | setuptools/tests/test_setuptools.py | import os
import pytest
import setuptools
@pytest.fixture
def example_source(tmpdir):
tmpdir.mkdir('foo')
(tmpdir / 'foo/bar.py').write('')
(tmpdir / 'readme.txt').write('')
return tmpdir
def test_findall(example_source):
found = list(setuptools.findall(str(example_source)))
expected = ['readme.txt', 'foo/bar.py']
expected = [example_source.join(fn) for fn in expected]
assert found == expected
def test_findall_curdir(example_source):
with example_source.as_cwd():
found = list(setuptools.findall())
expected = ['readme.txt', 'foo/bar.py']
assert found == expected
@pytest.fixture
def can_symlink(tmpdir):
"""
Skip if cannot create a symbolic link
"""
link_fn = 'link'
target_fn = 'target'
try:
os.symlink(target_fn, link_fn)
except (OSError, NotImplementedError, AttributeError):
pytest.skip("Cannot create symbolic links")
os.remove(link_fn)
def test_findall_missing_symlink(tmpdir, can_symlink):
with tmpdir.as_cwd():
os.symlink('foo', 'bar')
found = list(setuptools.findall())
assert found == []
| import os
import pytest
import setuptools
@pytest.fixture
def example_source(tmpdir):
tmpdir.mkdir('foo')
(tmpdir / 'foo/bar.py').write('')
(tmpdir / 'readme.txt').write('')
return tmpdir
def test_findall(example_source):
found = list(setuptools.findall(str(example_source)))
expected = ['readme.txt', 'foo/bar.py']
expected = [example_source.join(fn) for fn in expected]
assert found == expected
def test_findall_curdir(example_source):
with example_source.as_cwd():
found = list(setuptools.findall())
expected = ['readme.txt', os.path.join('foo', 'bar.py')]
assert found == expected
@pytest.fixture
def can_symlink(tmpdir):
"""
Skip if cannot create a symbolic link
"""
link_fn = 'link'
target_fn = 'target'
try:
os.symlink(target_fn, link_fn)
except (OSError, NotImplementedError, AttributeError):
pytest.skip("Cannot create symbolic links")
os.remove(link_fn)
def test_findall_missing_symlink(tmpdir, can_symlink):
with tmpdir.as_cwd():
os.symlink('foo', 'bar')
found = list(setuptools.findall())
assert found == []
| Fix failing test on Windows due to path separator | Fix failing test on Windows due to path separator
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools | ---
+++
@@ -23,7 +23,7 @@
def test_findall_curdir(example_source):
with example_source.as_cwd():
found = list(setuptools.findall())
- expected = ['readme.txt', 'foo/bar.py']
+ expected = ['readme.txt', os.path.join('foo', 'bar.py')]
assert found == expected
|
67469ca12b6145eed4051bd85c928f88d4d13791 | get_merged_prs_since_tag.py | get_merged_prs_since_tag.py | #!/usr/bin/env python3
"""Get a markdown-formatted list of PRs merged since a certain tag."""
import github_tools
Repo = github_tools.get_repo()
# TODO: implement getting date from repo tag./
tagname = '0.8.1'
tags = Repo.get_tags()
for t in tags:
if t.name == tagname:
startdate = t.commit.commit.committer.date
print('Tag date: ' + str(startdate))
pulls = Repo.get_pulls('closed')
user_length = 0
for p in pulls:
if (p.closed_at > startdate) and p.merged:
user_length = max(user_length, len(p.user.login))
format_string = '{:<' + str(user_length) + '}'
for p in pulls:
if (p.closed_at > startdate) and p.merged:
print("* [Nr " + str(p.number) + "]( "
+ str(p.html_url) + " ) by " + format_string.format(p.user.login)
+ ': ' + str(p.title))
| #!/usr/bin/env python3
"""Get a markdown-formatted list of PRs merged since a certain tag."""
import github_tools
Repo = github_tools.get_repo()
# TODO: implement getting date from repo tag.
tagname = '0.8.1'
tags = Repo.get_tags()
for t in tags:
if t.name == tagname:
startdate = t.commit.commit.committer.date
print('Selected tag ' + tagname + ', date: ' + str(startdate))
print('Fetching data...\n\n')
pulls = Repo.get_pulls('closed')
results = []
for p in pulls:
if (p.closed_at > startdate) and p.merged:
results.append({'login': p.user.login,
'number': p.number,
'url': p.html_url,
'title': p.title})
user_maxlength = max([len(entry['login']) for entry in results])
format_string = '{:<' + str(user_maxlength) + '}'
for r in results:
print("* [Nr " + str(r['number']) + "]( "
+ str(r['url']) + " ) by "
+ format_string.format(r['login'])
+ ': ' + str(r['title']))
| Refactor script to clean up code a bit. | Refactor script to clean up code a bit. | Python | mit | bilderbuchi/OF_repo_utilities | ---
+++
@@ -6,23 +6,29 @@
Repo = github_tools.get_repo()
-# TODO: implement getting date from repo tag./
+# TODO: implement getting date from repo tag.
tagname = '0.8.1'
tags = Repo.get_tags()
for t in tags:
if t.name == tagname:
startdate = t.commit.commit.committer.date
- print('Tag date: ' + str(startdate))
+ print('Selected tag ' + tagname + ', date: ' + str(startdate))
+print('Fetching data...\n\n')
pulls = Repo.get_pulls('closed')
-user_length = 0
+results = []
for p in pulls:
if (p.closed_at > startdate) and p.merged:
- user_length = max(user_length, len(p.user.login))
-format_string = '{:<' + str(user_length) + '}'
+ results.append({'login': p.user.login,
+ 'number': p.number,
+ 'url': p.html_url,
+ 'title': p.title})
-for p in pulls:
- if (p.closed_at > startdate) and p.merged:
- print("* [Nr " + str(p.number) + "]( "
- + str(p.html_url) + " ) by " + format_string.format(p.user.login)
- + ': ' + str(p.title))
+user_maxlength = max([len(entry['login']) for entry in results])
+format_string = '{:<' + str(user_maxlength) + '}'
+
+for r in results:
+ print("* [Nr " + str(r['number']) + "]( "
+ + str(r['url']) + " ) by "
+ + format_string.format(r['login'])
+ + ': ' + str(r['title'])) |
7131b2d82a856bfb4f7348567b8795243a24074a | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.5'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.6.dev0'
| Update dsub version to 0.4.6.dev0 | Update dsub version to 0.4.6.dev0
PiperOrigin-RevId: 393201424
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub | ---
+++
@@ -26,4 +26,4 @@
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
-DSUB_VERSION = '0.4.5'
+DSUB_VERSION = '0.4.6.dev0' |
cddbc2acdd16047c76c7f89e294c7f4918df6e93 | elections/uk/forms.py | elections/uk/forms.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from candidates.mapit import BaseMapItException
from popolo.models import Area
from compat import text_type
from .mapit import get_areas_from_postcode
class PostcodeForm(forms.Form):
q = forms.CharField(
label='Enter a candidate name to start',
max_length=20,
widget=forms.TextInput(attrs={'placeholder': 'Enter a name'})
)
def clean_postcode(self):
postcode = self.cleaned_data['postcode']
try:
# Go to MapIt to check if this postcode is valid and
# contained in a constituency. (If it's valid then the
# result is cached, so this doesn't cause a double lookup.)
get_areas_from_postcode(postcode)
except BaseMapItException as e:
raise ValidationError(text_type(e))
return postcode
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from candidates.mapit import BaseMapItException
from popolo.models import Area
from compat import text_type
from .mapit import get_areas_from_postcode
class PostcodeForm(forms.Form):
q = forms.CharField(
label='Enter a candidate name to start',
max_length=200,
widget=forms.TextInput(attrs={'placeholder': 'Enter a name'})
)
def clean_postcode(self):
postcode = self.cleaned_data['postcode']
try:
# Go to MapIt to check if this postcode is valid and
# contained in a constituency. (If it's valid then the
# result is cached, so this doesn't cause a double lookup.)
get_areas_from_postcode(postcode)
except BaseMapItException as e:
raise ValidationError(text_type(e))
return postcode
| Change max length on home page search form | Change max length on home page search form
| Python | agpl-3.0 | DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative | ---
+++
@@ -15,7 +15,7 @@
class PostcodeForm(forms.Form):
q = forms.CharField(
label='Enter a candidate name to start',
- max_length=20,
+ max_length=200,
widget=forms.TextInput(attrs={'placeholder': 'Enter a name'})
)
|
00c5f9b382fd1b060a973a3a3a1db33c280f2dd7 | respite/middleware.py | respite/middleware.py | import re
from django.http import QueryDict
class HttpMethodOverrideMiddleware:
"""
Facilitate for overriding the HTTP method with the X-HTTP-Method-Override
header or a '_method' HTTP POST parameter.
"""
def process_request(self, request):
if request.META.has_key('HTTP_X_HTTP_METHOD_OVERRIDE') \
or request.POST.has_key('_method'):
request.method = (
request.META.get('HTTP_X_HTTP_METHOD_OVERRIDE') or
request.POST.get('_method')
).upper()
if '_method' in request.POST:
request._raw_post_data = re.sub(r'_method=(PUT|PATCH|DELETE)&?', '', request.raw_post_data)
class HttpPutMiddleware:
"""
Facilitate for HTTP PUT in the same way Django facilitates for HTTP GET
and HTTP POST; populate a QueryDict instance with the request body in request.PUT.
"""
def process_request(self, request):
if request.method == 'PUT':
request.PUT = QueryDict(request.raw_post_data)
class HttpPatchMiddleware:
"""
Facilitate for HTTP PATCH in the same way Django facilitates for HTTP GET
and HTTP POST; populate a QueryDict instance with the request body in request.PATCH.
"""
def process_request(self, request):
if request.method == 'PATCH':
request.PATCH = QueryDict(request.raw_post_data)
| import re
from django.http import QueryDict
class HttpMethodOverrideMiddleware:
"""
Facilitate for overriding the HTTP method with the X-HTTP-Method-Override
header or a '_method' HTTP POST parameter.
"""
def process_request(self, request):
if 'HTTP_X_HTTP_METHOD_OVERRIDE' in request.META \
or '_method' in request.POST:
request.method = (
request.META.get('HTTP_X_HTTP_METHOD_OVERRIDE') or
request.POST.get('_method')
).upper()
if '_method' in request.POST:
request._raw_post_data = re.sub(r'_method=(PUT|PATCH|DELETE)&?', '', request.raw_post_data)
class HttpPutMiddleware:
"""
Facilitate for HTTP PUT in the same way Django facilitates for HTTP GET
and HTTP POST; populate a QueryDict instance with the request body in request.PUT.
"""
def process_request(self, request):
if request.method == 'PUT':
request.PUT = QueryDict(request.raw_post_data)
class HttpPatchMiddleware:
"""
Facilitate for HTTP PATCH in the same way Django facilitates for HTTP GET
and HTTP POST; populate a QueryDict instance with the request body in request.PATCH.
"""
def process_request(self, request):
if request.method == 'PATCH':
request.PATCH = QueryDict(request.raw_post_data)
| Refactor to use 'in' instead of 'has_key' | Refactor to use 'in' instead of 'has_key'
| Python | mit | jgorset/django-respite,jgorset/django-respite,jgorset/django-respite | ---
+++
@@ -9,8 +9,8 @@
"""
def process_request(self, request):
- if request.META.has_key('HTTP_X_HTTP_METHOD_OVERRIDE') \
- or request.POST.has_key('_method'):
+ if 'HTTP_X_HTTP_METHOD_OVERRIDE' in request.META \
+ or '_method' in request.POST:
request.method = (
request.META.get('HTTP_X_HTTP_METHOD_OVERRIDE') or
request.POST.get('_method') |
b74a95c81958ca567052e27f4df6d211d75f4a90 | contentcuration/contentcuration/dev_settings.py | contentcuration/contentcuration/dev_settings.py | import logging
import os
from .test_settings import *
try:
import debug_panel
except ImportError:
# no debug panel, no use trying to add it to our middleware
pass
else:
# if debug_panel exists, add it to our INSTALLED_APPS
INSTALLED_APPS += ('debug_panel', 'debug_toolbar', 'pympler')
MIDDLEWARE_CLASSES += ('debug_panel.middleware.DebugPanelMiddleware',)
DEBUG_TOOLBAR_CONFIG = {
'SHOW_TOOLBAR_CALLBACK': lambda x: True,
}
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
| import logging
import os
from .test_settings import *
try:
import debug_panel
except ImportError:
# no debug panel, no use trying to add it to our middleware
pass
else:
# if debug_panel exists, add it to our INSTALLED_APPS
INSTALLED_APPS += ('debug_panel', 'debug_toolbar', 'pympler')
MIDDLEWARE_CLASSES += ('debug_panel.middleware.DebugPanelMiddleware',)
DEBUG_TOOLBAR_CONFIG = {
'SHOW_TOOLBAR_CALLBACK': lambda x: True,
}
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
# GOOGLE DRIVE SETTINGS
GOOGLE_STORAGE_REQUEST_SHEET = "16X6zcFK8FS5t5tFaGpnxbWnWTXP88h4ccpSpPbyLeA8"
| Update testing request sheet in debug mode | Update testing request sheet in debug mode
| Python | mit | jayoshih/content-curation,jayoshih/content-curation,DXCanas/content-curation,fle-internal/content-curation,DXCanas/content-curation,DXCanas/content-curation,fle-internal/content-curation,fle-internal/content-curation,jayoshih/content-curation,DXCanas/content-curation,fle-internal/content-curation,jayoshih/content-curation | ---
+++
@@ -31,3 +31,6 @@
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
+
+# GOOGLE DRIVE SETTINGS
+GOOGLE_STORAGE_REQUEST_SHEET = "16X6zcFK8FS5t5tFaGpnxbWnWTXP88h4ccpSpPbyLeA8" |
b5db068a5545880d5d66d0be3ce88ccc393adcb0 | h5py/_hl/__init__.py | h5py/_hl/__init__.py | # This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
#
# Copyright 2008-2013 Andrew Collette and contributors
#
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
from __future__ import absolute_import
| # This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
#
# Copyright 2008-2013 Andrew Collette and contributors
#
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
"""
This subpackage implements the high-level interface for h5py.
Don't manually import things from here; the public API lives directly
in the top-level package namespace.
"""
from __future__ import absolute_import
| Fix lint issues in _hl subpackage | Fix lint issues in _hl subpackage
| Python | bsd-3-clause | h5py/h5py,h5py/h5py,h5py/h5py | ---
+++
@@ -7,5 +7,12 @@
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
+"""
+ This subpackage implements the high-level interface for h5py.
+
+ Don't manually import things from here; the public API lives directly
+ in the top-level package namespace.
+"""
+
from __future__ import absolute_import
|
3f2f8e1cf57c44b589b053902e2945cd2486414d | src/dashboard/src/components/accounts/backends.py | src/dashboard/src/components/accounts/backends.py | import re
from django.conf import settings
from django.dispatch import receiver
from django_auth_ldap.backend import LDAPBackend, populate_user
from shibboleth.backends import ShibbolethRemoteUserBackend
from components.helpers import generate_api_key
class CustomShibbolethRemoteUserBackend(ShibbolethRemoteUserBackend):
def configure_user(self, user):
generate_api_key(user)
return user
class CustomLDAPBackend(LDAPBackend):
"""Customize LDAP config."""
def __init__(self):
super(CustomLDAPBackend, self).__init__()
self._username_suffix = settings.AUTH_LDAP_USERNAME_SUFFIX
def ldap_to_django_username(self, username):
# Replaces user creation in get_ldap_users
return re.sub(self._username_suffix + "$", "", username)
def django_to_ldap_username(self, username):
# Replaces user creation in get_ldap_users
return username + self._username_suffix
@receiver(populate_user)
def ldap_populate_user(sender, user, ldap_user, **kwargs):
generate_api_key(user)
| import re
from django.conf import settings
from django.dispatch import receiver
from django_auth_ldap.backend import LDAPBackend, populate_user
from shibboleth.backends import ShibbolethRemoteUserBackend
from components.helpers import generate_api_key
class CustomShibbolethRemoteUserBackend(ShibbolethRemoteUserBackend):
def configure_user(self, user):
generate_api_key(user)
return user
class CustomLDAPBackend(LDAPBackend):
"""Customize LDAP config."""
def __init__(self):
super(CustomLDAPBackend, self).__init__()
self._username_suffix = settings.AUTH_LDAP_USERNAME_SUFFIX
def ldap_to_django_username(self, username):
# Replaces user creation in get_ldap_users
return re.sub(self._username_suffix + "$", "", username)
def django_to_ldap_username(self, username):
# Replaces user creation in get_ldap_users
return username + self._username_suffix
@receiver(populate_user)
def ldap_populate_user(sender, user, ldap_user, **kwargs):
if user.pk is None:
user.save()
generate_api_key(user)
| Fix API key generation with LDAP | Fix API key generation with LDAP
* Only generate on first login, not every login
* Handle case where user has not yet been saved
| Python | agpl-3.0 | artefactual/archivematica,artefactual/archivematica,artefactual/archivematica,artefactual/archivematica | ---
+++
@@ -33,4 +33,6 @@
@receiver(populate_user)
def ldap_populate_user(sender, user, ldap_user, **kwargs):
- generate_api_key(user)
+ if user.pk is None:
+ user.save()
+ generate_api_key(user) |
cfa1b895b5b3e44dd66565add95f636736153536 | marvin/stats/views.py | marvin/stats/views.py |
from flask import Flask
from flask import render_template
from marvin import client
import logging
app = Flask(__name__, static_url_path='/static')
@app.route('/')
def home_page():
# get list of transfered files
files = client.list_files()
return render_template('home_page.html', files=files)
def start():
app.run(debug=True)
|
from flask import Flask
from flask import render_template
from marvin.client import connect
import logging
app = Flask(__name__, static_url_path='/static')
@app.route('/')
def home_page():
# get list of transfered files
server = connect()
files = server.list_sending()
return render_template('home_page.html', files=files)
def start():
app.run(debug=True)
| Rewrite to new client usage | Rewrite to new client usage
| Python | mit | dou-hackathon-2015-marvin/marvin,dou-hackathon-2015-marvin/marvin,dou-hackathon-2015-marvin/marvin,dou-hackathon-2015-marvin/marvin | ---
+++
@@ -1,7 +1,7 @@
from flask import Flask
from flask import render_template
-from marvin import client
+from marvin.client import connect
import logging
app = Flask(__name__, static_url_path='/static')
@@ -9,7 +9,8 @@
@app.route('/')
def home_page():
# get list of transfered files
- files = client.list_files()
+ server = connect()
+ files = server.list_sending()
return render_template('home_page.html', files=files)
def start(): |
6b870b6605827472ff86f1d180fe94fa5bb5ea5d | TimeSeriesTools/__init__.py | TimeSeriesTools/__init__.py |
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Not inform about warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
# test_tsstatistics.test()
test_regimedetection.test()
test_feature_extraction.test()
test_similarities.test()
|
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Not inform about warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
# test_tsstatistics.test()
test_regimedetection.test()
test_feature_extraction.test()
test_similarities.test()
| Add utils tests into testing module. | Add utils tests into testing module.
| Python | mit | tgquintela/TimeSeriesTools,tgquintela/TimeSeriesTools | ---
+++
@@ -34,7 +34,7 @@
def test():
## Tests of modules
test_artificial_data.test()
-# test_utils.test()
+ test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test() |
e103f3bcb4e6488295503dea2f788b98c4b7d43e | domotica/alarm.py | domotica/alarm.py | import s7
class Alarm:
ALARM_DB = 12
def __init__(self, s7conn):
self._s7conn = s7conn
def arm(self):
# Toggle bit
self._s7con.writeBit(self.ALARM_DB, 0, 1, 1)
self._s7con.writeBit(self.ALARM_DB, 0, 1, 0)
def disarm(self):
# Toggle bit
self._s7con.writeBit(self.ALARM_DB, 0, 2, 1)
self._s7con.writeBit(self.ALARM_DB, 0, 2, 0)
def isArmed(self):
return False
def isAlarmTriggered(self):
return False
| import s7
class Alarm:
ALARM_DB = 12
def __init__(self, s7conn):
self._s7conn = s7conn
def arm(self):
# Toggle bit
self._s7conn.writeBit(self.ALARM_DB, 0, 1, 1)
self._s7conn.writeBit(self.ALARM_DB, 0, 1, 0)
def disarm(self):
# Toggle bit
self._s7conn.writeBit(self.ALARM_DB, 0, 2, 1)
self._s7conn.writeBit(self.ALARM_DB, 0, 2, 0)
def isArmed(self):
return False
def isAlarmTriggered(self):
return False
| Fix typo in variable name | Fix typo in variable name
s7conn vs. s7con
| Python | bsd-2-clause | kprovost/domotica,kprovost/domotica | ---
+++
@@ -8,13 +8,13 @@
def arm(self):
# Toggle bit
- self._s7con.writeBit(self.ALARM_DB, 0, 1, 1)
- self._s7con.writeBit(self.ALARM_DB, 0, 1, 0)
+ self._s7conn.writeBit(self.ALARM_DB, 0, 1, 1)
+ self._s7conn.writeBit(self.ALARM_DB, 0, 1, 0)
def disarm(self):
# Toggle bit
- self._s7con.writeBit(self.ALARM_DB, 0, 2, 1)
- self._s7con.writeBit(self.ALARM_DB, 0, 2, 0)
+ self._s7conn.writeBit(self.ALARM_DB, 0, 2, 1)
+ self._s7conn.writeBit(self.ALARM_DB, 0, 2, 0)
def isArmed(self):
return False |
57a5042b8a01c16937206678924f6fe9c5273cc9 | example/__init__.py | example/__init__.py | from flask import Flask, render_template
from flask_nav import Nav
from flask_nav.elements import *
nav = Nav()
class UserGreeting(Text):
def __init__(self):
pass
@property
def text(self):
return 'Hello, {}'.format('bob')
# registers the "top" menubar
nav.register_element('top', Navbar(
View('Widgits, Inc.', 'index'),
View('Our Mission', 'about'),
Subgroup(
'Products',
View('Wg240-Series', 'products', product='wg240'),
View('Wg250-Series', 'products', product='wg250'),
Separator(),
Text('Discontinued Products'),
View('Wg10X', 'products', product='wg10x'),
),
Link('Tech Support', 'http://techsupport.invalid/widgits_inc'),
UserGreeting(),
))
def create_app(configfile=None):
app = Flask(__name__)
nav.init_app(app)
# not good style, but like to keep our examples short
@app.route('/')
def index():
return render_template('index.html')
@app.route('/products/<product>/')
def products(product):
return render_template('index.html', msg='Buy our {}'.format(product))
@app.route('/about-us/')
def about():
return render_template('index.html')
return app
| from flask import Flask, render_template
from flask_nav import Nav
from flask_nav.elements import *
nav = Nav()
# registers the "top" menubar
nav.register_element('top', Navbar(
View('Widgits, Inc.', 'index'),
View('Our Mission', 'about'),
Subgroup(
'Products',
View('Wg240-Series', 'products', product='wg240'),
View('Wg250-Series', 'products', product='wg250'),
Separator(),
Text('Discontinued Products'),
View('Wg10X', 'products', product='wg10x'),
),
Link('Tech Support', 'http://techsupport.invalid/widgits_inc'),
))
def create_app(configfile=None):
app = Flask(__name__)
nav.init_app(app)
# not good style, but like to keep our examples short
@app.route('/')
def index():
return render_template('index.html')
@app.route('/products/<product>/')
def products(product):
return render_template('index.html', msg='Buy our {}'.format(product))
@app.route('/about-us/')
def about():
return render_template('index.html')
return app
| Remove accidentally added UserGreeting from example app. | Remove accidentally added UserGreeting from example app.
| Python | mit | mbr/flask-nav,mbr/flask-nav | ---
+++
@@ -3,15 +3,6 @@
from flask_nav.elements import *
nav = Nav()
-
-
-class UserGreeting(Text):
- def __init__(self):
- pass
-
- @property
- def text(self):
- return 'Hello, {}'.format('bob')
# registers the "top" menubar
@@ -27,7 +18,6 @@
View('Wg10X', 'products', product='wg10x'),
),
Link('Tech Support', 'http://techsupport.invalid/widgits_inc'),
- UserGreeting(),
))
|
644252445b6c9b2729e73f97ba20e96dcfadb73b | tests/test_parser.py | tests/test_parser.py | from gogoutils.parser import Parser
def test_parser_url():
"""Test parsing of url"""
urls = [
'http://github.com/gogoair/test',
'https://github.com/gogoair/test',
'http://github.com/gogoair/test.git',
'https://github.com/gogoair/test.git',
'https://username@testgithub.com/gogoair/test.git',
'git@github.com:gogoair/test.git',
]
for url in urls:
project, repo = Parser(url).parse_url()
assert project == 'gogoair'
assert repo == 'test'
| from gogoutils.parser import Parser
def test_parser_url():
"""Test parsing of url"""
urls = [
'http://github.com/gogoair/test',
'https://github.com/gogoair/test',
'http://github.com/gogoair/test.git',
'https://github.com/gogoair/test.git',
'https://username@testgithub.com/gogoair/test.git',
'git@github.com:gogoair/test.git',
'git://git@github.com/gogoair/test.git',
'file:///opt/git/gogoair/test.git',
'ssh://git@github.com/gogoair/test.git',
]
for url in urls:
project, repo = Parser(url).parse_url()
assert project == 'gogoair'
assert repo == 'test'
| Add additional git url formats | Add additional git url formats
| Python | apache-2.0 | gogoair/gogo-utils | ---
+++
@@ -11,6 +11,9 @@
'https://github.com/gogoair/test.git',
'https://username@testgithub.com/gogoair/test.git',
'git@github.com:gogoair/test.git',
+ 'git://git@github.com/gogoair/test.git',
+ 'file:///opt/git/gogoair/test.git',
+ 'ssh://git@github.com/gogoair/test.git',
]
for url in urls: |
abea380145ca0d2ebc1b68876b9cde0322be4db5 | astral/api/handlers/ping.py | astral/api/handlers/ping.py | from astral.api.handlers.base import BaseHandler
from astral.conf import settings
import logging
log = logging.getLogger(__name__)
class PingHandler(BaseHandler):
def get(self):
"""If 'bytes' is specified in the query string, return that number of
random bytes (for the purposes of a downstream bandwidth measurement).
Otherwise, returns a simple 200 OK HTTP response, to check the RTT.
"""
byte_count = self.get_argument('bytes', None)
if byte_count:
byte_count = int(byte_count)
log.debug("Returning %s bytes for a downstream bandwidth test",
byte_count)
with open('/dev/urandom') as random_file:
self.write(random_file.read(
max(byte_count, settings.DOWNSTREAM_CHECK_LIMIT)))
else:
self.write("Pong!")
log.debug("Responded to a ping")
def post(self):
"""Accept arbitrary POST data to check upstream bandwidth. Limit the
size to make sure we aren't DoS'd.
"""
log.debug("Received an upstream bandwidth check with %s bytes",
len(self.request.body))
| from astral.api.handlers.base import BaseHandler
from astral.conf import settings
import logging
log = logging.getLogger(__name__)
class PingHandler(BaseHandler):
def get(self):
"""If 'bytes' is specified in the query string, return that number of
random bytes (for the purposes of a downstream bandwidth measurement).
Otherwise, returns a simple 200 OK HTTP response, to check the RTT.
"""
byte_count = self.get_argument('bytes', None)
if byte_count:
byte_count = int(byte_count)
log.debug("Returning %s bytes for a downstream bandwidth test",
byte_count)
with open('/dev/urandom') as random_file:
self.write(random_file.read(
min(byte_count, settings.DOWNSTREAM_CHECK_LIMIT)))
else:
self.write("Pong!")
log.debug("Responded to a ping")
def post(self):
"""Accept arbitrary POST data to check upstream bandwidth. Limit the
size to make sure we aren't DoS'd.
"""
log.debug("Received an upstream bandwidth check with %s bytes",
len(self.request.body))
| Use min of specified byte count and limit, not max. | Use min of specified byte count and limit, not max.
| Python | mit | peplin/astral | ---
+++
@@ -18,7 +18,7 @@
byte_count)
with open('/dev/urandom') as random_file:
self.write(random_file.read(
- max(byte_count, settings.DOWNSTREAM_CHECK_LIMIT)))
+ min(byte_count, settings.DOWNSTREAM_CHECK_LIMIT)))
else:
self.write("Pong!")
log.debug("Responded to a ping") |
96dc9e590b81926fddb83a85a1352039c10c1509 | links/mlp.py | links/mlp.py | from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import super
from builtins import range
from future import standard_library
standard_library.install_aliases()
import random
import numpy as np
import chainer
from chainer import functions as F
from chainer import links as L
from chainer import cuda
class MLP(chainer.Chain):
"""Multi-Layer Perceptron"""
def __init__(self, in_size, out_size, hidden_sizes):
self.in_size = in_size
self.out_size = out_size
self.hidden_sizes = hidden_sizes
layers = {}
if hidden_sizes:
hidden_layers = []
hidden_layers.append(L.Linear(in_size, hidden_sizes[0]))
for hin, hout in zip(hidden_sizes, hidden_sizes[1:]):
hidden_layers.append(L.Linear(hin, hout))
layers['hidden_layers'] = chainer.ChainList(*hidden_layers)
layers['output'] = L.Linear(hidden_sizes[-1], out_size)
else:
layers['output'] = L.Linear(in_size, out_size)
super().__init__(**layers)
def __call__(self, x, test=False):
h = x
for l in self.hidden_layers:
h = F.relu(l(h))
return self.output(h)
| from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import super
from builtins import range
from future import standard_library
standard_library.install_aliases()
import random
import numpy as np
import chainer
from chainer import functions as F
from chainer import links as L
from chainer import cuda
class MLP(chainer.Chain):
"""Multi-Layer Perceptron"""
def __init__(self, in_size, out_size, hidden_sizes):
self.in_size = in_size
self.out_size = out_size
self.hidden_sizes = hidden_sizes
layers = {}
if hidden_sizes:
hidden_layers = []
hidden_layers.append(L.Linear(in_size, hidden_sizes[0]))
for hin, hout in zip(hidden_sizes, hidden_sizes[1:]):
hidden_layers.append(L.Linear(hin, hout))
layers['hidden_layers'] = chainer.ChainList(*hidden_layers)
layers['output'] = L.Linear(hidden_sizes[-1], out_size)
else:
layers['output'] = L.Linear(in_size, out_size)
super().__init__(**layers)
def __call__(self, x, test=False):
h = x
if self.hidden_sizes:
for l in self.hidden_layers:
h = F.relu(l(h))
return self.output(h)
| Support configuration with no hidden-layer | Support configuration with no hidden-layer
| Python | mit | toslunar/chainerrl,toslunar/chainerrl | ---
+++
@@ -39,7 +39,8 @@
def __call__(self, x, test=False):
h = x
- for l in self.hidden_layers:
- h = F.relu(l(h))
+ if self.hidden_sizes:
+ for l in self.hidden_layers:
+ h = F.relu(l(h))
return self.output(h)
|
5a88126b53bbd47a4c8899b50bdbf0d913183bd5 | norm/test/test_porcelain.py | norm/test/test_porcelain.py | from twisted.trial.unittest import TestCase
from twisted.internet import defer
import os
from norm.porcelain import makePool
postgres_url = os.environ.get('NORM_POSTGRESQL_URI', None)
skip_postgres = ('You must define NORM_POSTGRESQL_URI in order to run this '
'postgres test')
if postgres_url:
skip_postgres = ''
class PostgresTest(TestCase):
timeout = 2
skip = skip_postgres
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool(postgres_url)
yield pool.runOperation('''CREATE TEMPORARY TABLE porc1 (
id serial primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into foo (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from foo where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
| from twisted.trial.unittest import TestCase
from twisted.internet import defer
import os
from norm.porcelain import makePool
postgres_url = os.environ.get('NORM_POSTGRESQL_URI', None)
skip_postgres = ('You must define NORM_POSTGRESQL_URI in order to run this '
'postgres test')
if postgres_url:
skip_postgres = ''
class PostgresTest(TestCase):
timeout = 2
skip = skip_postgres
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool(postgres_url)
yield pool.runOperation('''CREATE TEMPORARY TABLE porc1 (
id serial primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
class SqliteTest(TestCase):
timeout = 2
@defer.inlineCallbacks
def test_basic(self):
pool = yield makePool('sqlite:')
yield pool.runOperation('''CREATE TABLE porc1 (
id integer primary key,
created timestamp default current_timestamp,
name text
)''')
def interaction(cursor, name):
d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')]) | Fix postgres porcelain test and add sqlite one | Fix postgres porcelain test and add sqlite one
| Python | mit | iffy/norm,iffy/norm | ---
+++
@@ -32,10 +32,34 @@
)''')
def interaction(cursor, name):
- d = cursor.execute('insert into foo (name) values (?)', (name,))
+ d = cursor.execute('insert into porc1 (name) values (?)', (name,))
d.addCallback(lambda _: cursor.lastRowId())
return d
rowid = yield pool.runInteraction(interaction, 'bob')
- rows = yield pool.runQuery('select id, name from foo where id = ?', (rowid,))
+ rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
self.assertEqual(rows, [(rowid, 'bob')])
+
+
+class SqliteTest(TestCase):
+
+
+ timeout = 2
+
+
+ @defer.inlineCallbacks
+ def test_basic(self):
+ pool = yield makePool('sqlite:')
+ yield pool.runOperation('''CREATE TABLE porc1 (
+ id integer primary key,
+ created timestamp default current_timestamp,
+ name text
+ )''')
+
+ def interaction(cursor, name):
+ d = cursor.execute('insert into porc1 (name) values (?)', (name,))
+ d.addCallback(lambda _: cursor.lastRowId())
+ return d
+ rowid = yield pool.runInteraction(interaction, 'bob')
+ rows = yield pool.runQuery('select id, name from porc1 where id = ?', (rowid,))
+ self.assertEqual(rows, [(rowid, 'bob')]) |
7d7043560f26c31346472b6452e8b191729c54a3 | offsite_storage/settings.py | offsite_storage/settings.py | from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%(bucket_name)s.s3.amazonaws.com/'
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
| from django.conf import settings
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY')
AWS_STATIC_BUCKET_NAME = getattr(settings, 'AWS_STATIC_BUCKET_NAME')
AWS_MEDIA_ACCESS_KEY_ID = getattr(
settings, 'AWS_MEDIA_ACCESS_KEY_ID', AWS_ACCESS_KEY_ID)
AWS_MEDIA_SECRET_ACCESS_KEY = getattr(
settings, 'AWS_MEDIA_SECRET_ACCESS_KEY', AWS_SECRET_ACCESS_KEY)
AWS_MEDIA_BUCKET_NAME = getattr(settings, 'AWS_MEDIA_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES',
['*.less', '*.scss', '*.txt', 'components'])
| Use custom endpoint url in AWS_HOST_URL variable | Use custom endpoint url in AWS_HOST_URL variable
| Python | bsd-3-clause | mirumee/django-offsite-storage | ---
+++
@@ -13,7 +13,7 @@
AWS_S3_ENDPOINT_URL = getattr(
settings, 'AWS_S3_ENDPOINT_URL', 's3.amazonaws.com')
-AWS_HOST_URL = 'https://%(bucket_name)s.s3.amazonaws.com/'
+AWS_HOST_URL = 'https://%%(bucket_name)s.%s/' % AWS_S3_ENDPOINT_URL
AWS_POLICY = 'public-read'
IGNORE_FILES = getattr(settings, 'OFFSITE_STORAGE_IGNORE_FILES', |
e45f777f74cf8dc48c48732269be4c37c8966c76 | h2o-py/tests/testdir_munging/unop/pyunit_cor.py | h2o-py/tests/testdir_munging/unop/pyunit_cor.py | from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
##
# Test out the cor() functionality
# If NAs in the frame, they are skipped in calculation unless na.rm = F
# If any categorical columns, throw an error
##
import numpy as np
def cor_test():
iris_h2o = h2o.import_file(path=pyunit_utils.locate("smalldata/iris/iris.csv"))
iris_np = np.genfromtxt(pyunit_utils.locate("smalldata/iris/iris.csv"),
delimiter=',',
skip_header=1,
usecols=(0, 1, 2, 3))
cor_np = h2o.H2OFrame(np.corrcoef(iris_np,rowvar=0))
cor_h2o = iris_h2o[0:4].cor()
cor_diff = abs(cor_h2o - cor_np)
print "Correlation matrix with H2O: "
print cor_h2o
print "Correlation matrix with Numpy: "
print cor_np
print "Correlation differences between H2O and Numpy: "
print cor_diff
print "Max difference in correlation calculation between H2O and Numpy: "
print cor_diff.max()
max = cor_diff.max()
assert max < .006, "expected equal correlations"
if __name__ == "__main__":
pyunit_utils.standalone_test(cor_test)
else:
cor_test() | from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
##
# Test out the cor() functionality
# If NAs in the frame, they are skipped in calculation unless na.rm = F
# If any categorical columns, throw an error
##
import numpy as np
def cor_test():
iris_h2o = h2o.import_file(path=pyunit_utils.locate("smalldata/iris/iris.csv"))
iris_np = np.genfromtxt(pyunit_utils.locate("smalldata/iris/iris.csv"),
delimiter=',',
skip_header=1,
usecols=(0, 1, 2, 3))
cor_np = h2o.H2OFrame(np.corrcoef(iris_np,rowvar=0))
cor_h2o = iris_h2o[0:4].cor()
cor_diff = abs(cor_h2o - cor_np)
print("Correlation matrix with H2O: ")
print cor_h2o
print("Correlation matrix with Numpy: ")
print cor_np
print("Correlation differences between H2O and Numpy: ")
print cor_diff
print("Max difference in correlation calculation between H2O and Numpy: ")
print cor_diff.max()
max = cor_diff.max()
assert max < .006, "expected equal correlations"
if __name__ == "__main__":
pyunit_utils.standalone_test(cor_test)
else:
cor_test() | Add parenthesis to print statement | Add parenthesis to print statement
| Python | apache-2.0 | mathemage/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,mathemage/h2o-3,h2oai/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,spennihana/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,mathemage/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,spennihana/h2o-3,mathemage/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,mathemage/h2o-3,h2oai/h2o-3,mathemage/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,spennihana/h2o-3 | ---
+++
@@ -28,16 +28,16 @@
cor_h2o = iris_h2o[0:4].cor()
cor_diff = abs(cor_h2o - cor_np)
- print "Correlation matrix with H2O: "
+ print("Correlation matrix with H2O: ")
print cor_h2o
- print "Correlation matrix with Numpy: "
+ print("Correlation matrix with Numpy: ")
print cor_np
- print "Correlation differences between H2O and Numpy: "
+ print("Correlation differences between H2O and Numpy: ")
print cor_diff
- print "Max difference in correlation calculation between H2O and Numpy: "
+ print("Max difference in correlation calculation between H2O and Numpy: ")
print cor_diff.max()
max = cor_diff.max() |
1daf5825580d31e3f2825b5b5edfaa2aed8146fe | mopidy/internal/gi.py | mopidy/internal/gi.py | from __future__ import absolute_import, unicode_literals
import textwrap
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
from gi.repository import GLib, GObject, Gst, GstPbutils
except ImportError:
print(textwrap.dedent("""
ERROR: A GObject Python package was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
else:
Gst.is_initialized() or Gst.init()
__all__ = [
'GLib',
'GObject',
'Gst',
'GstPbutils',
'gi',
]
| from __future__ import absolute_import, unicode_literals
import sys
import textwrap
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
from gi.repository import GLib, GObject, Gst, GstPbutils
except ImportError:
print(textwrap.dedent("""
ERROR: A GObject Python package was not found.
Mopidy requires GStreamer to work. GStreamer is a C library with a
number of dependencies itself, and cannot be installed with the regular
Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
else:
Gst.is_initialized() or Gst.init()
REQUIRED_GST_VERSION = (1, 2)
if Gst.version() < REQUIRED_GST_VERSION:
sys.exit(
'ERROR: Mopidy requires GStreamer >= %s, but found %s.' % (
'.'.join(map(str, REQUIRED_GST_VERSION)), Gst.version_string()))
__all__ = [
'GLib',
'GObject',
'Gst',
'GstPbutils',
'gi',
]
| Check GStreamer version on start | gst1: Check GStreamer version on start
If GStreamer is too old, it fails like this:
$ mopidy
ERROR: Mopidy requires GStreamer >= 1.2, but found GStreamer 1.0.0.
| Python | apache-2.0 | kingosticks/mopidy,jodal/mopidy,mokieyue/mopidy,tkem/mopidy,kingosticks/mopidy,tkem/mopidy,mokieyue/mopidy,adamcik/mopidy,adamcik/mopidy,jodal/mopidy,mopidy/mopidy,vrs01/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,tkem/mopidy,jodal/mopidy,jcass77/mopidy,mopidy/mopidy,adamcik/mopidy,tkem/mopidy,mokieyue/mopidy,jcass77/mopidy,ZenithDK/mopidy,vrs01/mopidy,kingosticks/mopidy,mopidy/mopidy,ZenithDK/mopidy,vrs01/mopidy,jcass77/mopidy,vrs01/mopidy,mokieyue/mopidy | ---
+++
@@ -1,5 +1,6 @@
from __future__ import absolute_import, unicode_literals
+import sys
import textwrap
@@ -24,6 +25,14 @@
Gst.is_initialized() or Gst.init()
+REQUIRED_GST_VERSION = (1, 2)
+
+if Gst.version() < REQUIRED_GST_VERSION:
+ sys.exit(
+ 'ERROR: Mopidy requires GStreamer >= %s, but found %s.' % (
+ '.'.join(map(str, REQUIRED_GST_VERSION)), Gst.version_string()))
+
+
__all__ = [
'GLib',
'GObject', |
b65c95ca400f91648a53553f51979f5ceb7a0d94 | test/test-unrealcv.py | test/test-unrealcv.py | # TODO: Test robustness, test speed
import unittest, time, sys, argparse, threading
sys.path.append('./test/ipc')
from common_conf import *
from test_dev_server import TestDevServer
from test_client import TestUE4CVClient
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--travis', action='store_true') # Only run test availabe to travis CI
args = parser.parse_args()
suites = []
load = unittest.TestLoader().loadTestsFromTestCase
s = load(TestDevServer); suites.append(s)
s = load(TestUE4CVClient); suites.append(s)
suite_obj = unittest.TestSuite(suites)
ret = not unittest.TextTestRunner(verbosity = 2).run(suite_obj).wasSuccessful()
sys.exit(ret)
| '''
Run python test.py to test unrealcv
'''
import unittest, time, sys, argparse, threading
from common_conf import *
def run_full_test():
pass
def run_travis_test():
sys.path.append('./test/ipc')
from test_dev_server import TestDevServer
from test_client import TestUE4CVClient
load = unittest.TestLoader().loadTestsFromTestCase
suites = []
s = load(TestDevServer); suites.append(s)
s = load(TestUE4CVClient); suites.append(s)
suite_obj = unittest.TestSuite(suites)
ret = not unittest.TextTestRunner(verbosity = 2).run(suite_obj).wasSuccessful()
return ret
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--travis', action='store_true') # Only run test availabe to travis CI
args = parser.parse_args()
if args.travis:
ret = run_travis_test()
else:
ret = run_full_test()
sys.exit(ret)
| Clean up the portal of test script. | Clean up the portal of test script.
| Python | mit | unrealcv/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv | ---
+++
@@ -1,21 +1,35 @@
-# TODO: Test robustness, test speed
+'''
+Run python test.py to test unrealcv
+'''
import unittest, time, sys, argparse, threading
-sys.path.append('./test/ipc')
from common_conf import *
-from test_dev_server import TestDevServer
-from test_client import TestUE4CVClient
+
+def run_full_test():
+ pass
+
+def run_travis_test():
+ sys.path.append('./test/ipc')
+ from test_dev_server import TestDevServer
+ from test_client import TestUE4CVClient
+ load = unittest.TestLoader().loadTestsFromTestCase
+
+ suites = []
+ s = load(TestDevServer); suites.append(s)
+ s = load(TestUE4CVClient); suites.append(s)
+ suite_obj = unittest.TestSuite(suites)
+
+ ret = not unittest.TextTestRunner(verbosity = 2).run(suite_obj).wasSuccessful()
+
+ return ret
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--travis', action='store_true') # Only run test availabe to travis CI
args = parser.parse_args()
- suites = []
+ if args.travis:
+ ret = run_travis_test()
+ else:
+ ret = run_full_test()
- load = unittest.TestLoader().loadTestsFromTestCase
- s = load(TestDevServer); suites.append(s)
- s = load(TestUE4CVClient); suites.append(s)
-
- suite_obj = unittest.TestSuite(suites)
- ret = not unittest.TextTestRunner(verbosity = 2).run(suite_obj).wasSuccessful()
sys.exit(ret) |
26250bf43e659c03576a4d7e4d986b622a18bb48 | swifpy/dictionary.py | swifpy/dictionary.py | import typing as tp
import builtins as py
from .optional import Optional, optional
K = tp.TypeVar('K')
V = tp.TypeVar('V')
class Dictionary(tp.Generic[K, V], tp.Iterable[tp.Tuple[K, V]]):
def __init__(self, entries: tp.Dict[K, V]) -> None:
self._entries: tp.Dict[K, V] = py.dict(entries)
def __getitem__(self, key: K) -> Optional[V]:
return optional(self._entries.get(key))
def __setitem__(self, key: K, value: V) -> None:
self._entries[key] = value
def for_each(self, body: tp.Callable[[K, V], None]) -> None:
for key, value in self._entries.items():
body(key, value)
@property
def count(self) -> int:
return len(self._entries)
def remove_all(self) -> None:
self._entries.clear()
def __iter__(self) -> tp.Iterator[tp.Tuple[K, V]]:
return self._entries.items().__iter__()
| import typing as tp
import builtins as py
from .optional import Optional, optional
K = tp.TypeVar('K')
V = tp.TypeVar('V')
class Dictionary(tp.Generic[K, V], tp.Iterable[tp.Tuple[K, V]]):
def __init__(self, entries: tp.Dict[K, V]) -> None:
self._entries: tp.Dict[K, V] = py.dict(entries)
def __getitem__(self, key: K) -> Optional[V]:
return optional(self._entries.get(key))
def __setitem__(self, key: K, value: V) -> None:
self._entries[key] = value
@property
def keys(self) -> tp.Iterable[K]:
return self._entries.keys()
@property
def values(self) -> tp.Iterable[V]:
return self._entries.values()
def for_each(self, body: tp.Callable[[K, V], None]) -> None:
for key, value in self._entries.items():
body(key, value)
@property
def count(self) -> int:
return len(self._entries)
def remove_all(self) -> None:
self._entries.clear()
def __iter__(self) -> tp.Iterator[tp.Tuple[K, V]]:
return self._entries.items().__iter__()
| Implement `keys` and `values` of `Dictionary` | Implement `keys` and `values` of `Dictionary`
| Python | mit | koher/swifpy | ---
+++
@@ -16,6 +16,14 @@
def __setitem__(self, key: K, value: V) -> None:
self._entries[key] = value
+ @property
+ def keys(self) -> tp.Iterable[K]:
+ return self._entries.keys()
+
+ @property
+ def values(self) -> tp.Iterable[V]:
+ return self._entries.values()
+
def for_each(self, body: tp.Callable[[K, V], None]) -> None:
for key, value in self._entries.items():
body(key, value) |
5b18bcfa03876dbfda75b14bf8239f01c1f70ec2 | fftresize/fftresize.py | fftresize/fftresize.py | #!/usr/bin/env python2
'''Resize images using the FFT
FFTresize resizes images using zero-padding in the frequency domain.
'''
from fftinterp import interp2
import imutils
from numpy import zeros as _zeros
__author__ = 'Mansour Moufid'
__copyright__ = 'Copyright 2013, Mansour Moufid'
__license__ = 'ISC'
__version__ = '0.2'
__email__ = 'mansourmoufid@gmail.com'
__status__ = 'Development'
def resize(filename, factor=1.5):
'''Resize an image by zero-padding in the frequency domain.
Return the filename of the resized image.
'''
img = imutils.read(filename)
nchannels = imutils.channels(img)
if nchannels == 1:
new = interp2(img, factor)
else:
new = None
for i in range(nchannels):
rgb = img[:, :, i]
newrgb = interp2(rgb, factor)
if new is None:
newsize = list(newrgb.shape)
newsize.append(imutils.channels(img))
new = _zeros(tuple(newsize))
new[:, :, i] = newrgb
return imutils.save(new, filename)
if '__main__' in __name__:
pass
| #!/usr/bin/env python2
'''Resize images using the FFT
FFTresize resizes images using zero-padding in the frequency domain.
'''
from numpy import zeros as _zeros
from . import fftinterp
from . import imutils
__author__ = 'Mansour Moufid'
__copyright__ = 'Copyright 2013, Mansour Moufid'
__license__ = 'ISC'
__version__ = '0.2'
__email__ = 'mansourmoufid@gmail.com'
__status__ = 'Development'
def resize(filename, factor=1.5):
'''Resize an image by zero-padding in the frequency domain.
Return the filename of the resized image.
'''
img = imutils.read(filename)
nchannels = imutils.channels(img)
if nchannels == 1:
new = fftinterp.interp2(img, factor)
else:
new = None
for i in range(nchannels):
rgb = img[:, :, i]
newrgb = fftinterp.interp2(rgb, factor)
if new is None:
newsize = list(newrgb.shape)
newsize.append(imutils.channels(img))
new = _zeros(tuple(newsize))
new[:, :, i] = newrgb
return imutils.save(new, filename)
if '__main__' in __name__:
pass
| Use relative imports in the package. | Use relative imports in the package.
| Python | isc | eliteraspberries/fftresize | ---
+++
@@ -6,9 +6,10 @@
'''
-from fftinterp import interp2
-import imutils
from numpy import zeros as _zeros
+
+from . import fftinterp
+from . import imutils
__author__ = 'Mansour Moufid'
@@ -27,12 +28,12 @@
img = imutils.read(filename)
nchannels = imutils.channels(img)
if nchannels == 1:
- new = interp2(img, factor)
+ new = fftinterp.interp2(img, factor)
else:
new = None
for i in range(nchannels):
rgb = img[:, :, i]
- newrgb = interp2(rgb, factor)
+ newrgb = fftinterp.interp2(rgb, factor)
if new is None:
newsize = list(newrgb.shape)
newsize.append(imutils.channels(img)) |
8d0325e28a1836a8afdfc33916d36a3e259ad131 | fil_finder/__init__.py | fil_finder/__init__.py | # Licensed under an MIT open source license - see LICENSE
__version__ = "1.2.2"
from .analysis import Analysis
from .filfind_class import fil_finder_2D
from .width_profiles import filament_profile
| # Licensed under an MIT open source license - see LICENSE
__version__ = "1.2.2"
from .analysis import Analysis
from .filfind_class import fil_finder_2D
| Revert importing from the top | Revert importing from the top
| Python | mit | e-koch/FilFinder | ---
+++
@@ -4,4 +4,3 @@
from .analysis import Analysis
from .filfind_class import fil_finder_2D
-from .width_profiles import filament_profile |
77c245240fcccf1c7c6f3251168801de45182b8d | klaxer/__init__.py | klaxer/__init__.py | """Entry point for all things Klaxer"""
__author__ = 'Aru Sahni, Kevin Dwyer, Justin Shelton, Dylan Bernard, Will Schneider, et al'
__version__ = '0.0.1'
__license__ = 'MIT'
APP_NAME = 'Klaxer'
| """Entry point for all things Klaxer"""
__author__ = 'Aru Sahni, Kevin Dwyer, Justin Shelton, Dylan Bernard, et al'
__version__ = '0.0.1'
__license__ = 'MIT'
APP_NAME = 'Klaxer'
| Adjust author list to only include contributors. | Adjust author list to only include contributors.
| Python | mit | klaxer/klaxer | ---
+++
@@ -1,6 +1,6 @@
"""Entry point for all things Klaxer"""
-__author__ = 'Aru Sahni, Kevin Dwyer, Justin Shelton, Dylan Bernard, Will Schneider, et al'
+__author__ = 'Aru Sahni, Kevin Dwyer, Justin Shelton, Dylan Bernard, et al'
__version__ = '0.0.1'
__license__ = 'MIT'
|
feddcfd9153da81777c25571f35ee3c97e655c64 | generate_migrations.py | generate_migrations.py | #!/usr/bin/env python
# coding: utf-8
import sys
from argparse import ArgumentParser
from os.path import abspath
from os.path import dirname
# Modify the path so that our djoauth2 app is in it.
parent_dir = dirname(abspath(__file__))
sys.path.insert(0, parent_dir)
# Load Django-related settings; necessary for tests to run and for Django
# imports to work.
import local_settings
# Now, imports from Django will work properly without raising errors related to
# missing or badly-configured settings.
from django.core import management
def generate_migrations(initial):
management.call_command('syncdb', interactive=False)
if initial:
management.call_command('schemamigration', 'djoauth2', initial=True)
else:
management.call_command('schemamigration', 'djoauth2', auto=True)
def test_migrations():
management.call_command('syncdb', interactive=False)
management.call_command('migrate', 'djoauth2')
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--initial-migration',
action='store_true',
default=False,
dest='initial')
parser.add_argument('--test-migrations',
action='store_true',
default=False,
dest='test_migrations')
args = parser.parse_args()
if args.test_migrations:
test_migrations()
else:
generate_migrations(args.initial)
| #!/usr/bin/env python
# coding: utf-8
import sys
from argparse import ArgumentParser
from os.path import abspath
from os.path import dirname
# Modify the path so that our djoauth2 app is in it.
parent_dir = dirname(abspath(__file__))
sys.path.insert(0, parent_dir)
# Load Django-related settings; necessary for tests to run and for Django
# imports to work.
import local_settings
# Now, imports from Django will work properly without raising errors related to
# missing or badly-configured settings.
from django.core import management
from refactor_migrations import refactor
def generate_migrations(initial):
management.call_command('syncdb', interactive=False)
if initial:
management.call_command('schemamigration', 'djoauth2', initial=True)
else:
management.call_command('schemamigration', 'djoauth2', auto=True)
refactor('./djoauth2/migrations/')
def test_migrations():
management.call_command('syncdb', interactive=False)
management.call_command('migrate', 'djoauth2')
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--initial-migration',
action='store_true',
default=False,
dest='initial')
parser.add_argument('--test-migrations',
action='store_true',
default=False,
dest='test_migrations')
args = parser.parse_args()
if args.test_migrations:
test_migrations()
else:
generate_migrations(args.initial)
| Refactor migrations after generating to ensure custom user model compatibility. | Refactor migrations after generating to ensure custom user model compatibility.
| Python | mit | vden/djoauth2-ng,Locu/djoauth2,seler/djoauth2,vden/djoauth2-ng,seler/djoauth2,Locu/djoauth2 | ---
+++
@@ -17,6 +17,8 @@
# missing or badly-configured settings.
from django.core import management
+from refactor_migrations import refactor
+
def generate_migrations(initial):
management.call_command('syncdb', interactive=False)
@@ -24,6 +26,8 @@
management.call_command('schemamigration', 'djoauth2', initial=True)
else:
management.call_command('schemamigration', 'djoauth2', auto=True)
+ refactor('./djoauth2/migrations/')
+
def test_migrations():
management.call_command('syncdb', interactive=False) |
93c6e5d39b1779f0eca9b28f5111d7c402ebc1ba | geotagging/views.py | geotagging/views.py | from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.contenttypes.models import ContentType
from geotagging.models import Point
def add_edit_point(request, content_type_id, object_id,
template=None, form_class=None):
model_class = ContentType.objects.get(id=content_type_id).model_class()
object = model_class.objects.get(id=object_id)
object_content_type = ContentType.objects.get_for_model(object)
geotag = Point.objects.get(content_type__pk=object_content_type.id,
object_id=object.id)
if request.method == "POST":
form = form_class(request.POST, instance=geotag)
if form.is_valid():
new_object = form.save(commit=False)
new_object.object = object
new_object.save()
return HttpResponseRedirect("/admin/%s/%s/%s/"
%(object_content_type.app_label,
object_content_type.model,
object.id))
form = form_class(instance=geotag)
#import ipdb; ipdb.set_trace()
context = RequestContext(request, {
'form': form,
'object' : object,
'object_content_type' : object_content_type,
'geotag' : geotag,
})
return render_to_response(template, context_instance=context )
| from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from geotagging.models import Point
def add_edit_point(request, content_type_id, object_id,
template=None, form_class=None):
model_class = ContentType.objects.get(id=content_type_id).model_class()
object = model_class.objects.get(id=object_id)
object_content_type = ContentType.objects.get_for_model(object)
try:
geotag = Point.objects.get(content_type__pk=object_content_type.id,
object_id=object.id)
except ObjectDoesNotExist:
geotag = None
if request.method == "POST":
form = form_class(request.POST, instance=geotag)
if form.is_valid():
new_object = form.save(commit=False)
new_object.object = object
new_object.save()
return HttpResponseRedirect("/admin/%s/%s/%s/"
%(object_content_type.app_label,
object_content_type.model,
object.id))
form = form_class(instance=geotag)
#import ipdb; ipdb.set_trace()
context = RequestContext(request, {
'form': form,
'object' : object,
'object_content_type' : object_content_type,
'geotag' : geotag,
})
return render_to_response(template, context_instance=context )
| Fix a bug when you try to add a geo tag to an object that does not have already one | Fix a bug when you try to add a geo tag to an object that does not have already one | Python | bsd-3-clause | lincolnloop/django-geotagging,lincolnloop/django-geotagging | ---
+++
@@ -2,6 +2,7 @@
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.contenttypes.models import ContentType
+from django.core.exceptions import ObjectDoesNotExist
from geotagging.models import Point
@@ -11,8 +12,11 @@
model_class = ContentType.objects.get(id=content_type_id).model_class()
object = model_class.objects.get(id=object_id)
object_content_type = ContentType.objects.get_for_model(object)
- geotag = Point.objects.get(content_type__pk=object_content_type.id,
+ try:
+ geotag = Point.objects.get(content_type__pk=object_content_type.id,
object_id=object.id)
+ except ObjectDoesNotExist:
+ geotag = None
if request.method == "POST":
form = form_class(request.POST, instance=geotag)
if form.is_valid(): |
c485ac39ede0bfbcc6b68fd10ed35ff692124c6d | server/api/auth.py | server/api/auth.py | """All routes regarding authentication."""
from datetime import datetime
from sqlalchemy import or_
from webargs.flaskparser import use_args
from server import user_bp
from server.extensions import db
from server.models import User
from server.responses import bad_request, ok
from server.validation.user import login_fields
from server.helpers.decorators import login_exempt
@user_bp.route('/api/auth/login', methods=['POST'])
@login_exempt
@use_args(login_fields)
def login(args):
"""Endpoint for login.
Check if we can login with the credentials. We try to get the
user by searching email and nickname for the given identifier.
"""
identifier = args['identifier']
password = args['password']
# Check if the user exists
user = db.session.query(User) \
.filter(or_(User.nickname == identifier, User.email == identifier)) \
.one_or_none()
if user is None:
return bad_request('Unknown credentials or wrong password.')
# Validate password
valid_password = user.verify_password(password)
if not valid_password:
return bad_request('Unknown credentials or wrong password.')
if user.has_valid_auth_token:
token = user.current_auth_token
else:
token = user.generate_auth_token()
user.last_login_at = datetime.utcnow()
db.session.add(user)
db.session.commit()
# return ok({"token": token,
# "pod_id": user.pod.id})
# "user_id": user.id})
return ok(f'{user.id}:{token}')
| """All routes regarding authentication."""
from datetime import datetime
from sqlalchemy import or_
from webargs.flaskparser import use_args
from server import user_bp
from server.extensions import db
from server.models import User
from server.responses import bad_request, ok
from server.validation.user import login_fields
from server.helpers.decorators import login_exempt
@user_bp.route('/api/auth/login', methods=['POST'])
@login_exempt
@use_args(login_fields)
def login(args):
    """Endpoint for login.

    Look the user up by nickname or email and, when the supplied password
    matches, respond with an auth token together with the user id.
    """
    identifier = args['identifier']
    password = args['password']

    # The identifier may match either the nickname or the email address.
    matches_identifier = or_(User.nickname == identifier, User.email == identifier)
    user = db.session.query(User).filter(matches_identifier).one_or_none()
    if user is None:
        return bad_request('Unknown credentials or wrong password.')

    if not user.verify_password(password):
        return bad_request('Unknown credentials or wrong password.')

    # Reuse a still-valid token instead of minting a fresh one.
    if user.has_valid_auth_token:
        token = user.current_auth_token
    else:
        token = user.generate_auth_token()

    user.last_login_at = datetime.utcnow()
    db.session.add(user)
    db.session.commit()

    return ok({"token": token,
               "user_id": user.id})
| Send user id and token | Send user id and token
| Python | mit | Nukesor/spacesurvival,Nukesor/spacesurvival,Nukesor/spacesurvival,Nukesor/spacesurvival | ---
+++
@@ -45,7 +45,5 @@
db.session.add(user)
db.session.commit()
-# return ok({"token": token,
-# "pod_id": user.pod.id})
-# "user_id": user.id})
- return ok(f'{user.id}:{token}')
+ return ok({"token": token,
+ "user_id": user.id}) |
84816dda37d071e521f65449ee59c992b5e302bc | megaprojects/blog/models.py | megaprojects/blog/models.py | from django.core.urlresolvers import reverse
from django.db import models
from django.utils import timezone
from core.models import AuthorModel, ImageModel
from .managers import PostManager, ImageManager
import util
STATUS_CHOICES = [('d', 'Draft'), ('p', 'Published'), ('w', 'Withdrawn')]
class Post(AuthorModel):
pubdate = models.DateTimeField('publication date', default=timezone.now())
status = models.CharField(max_length=1, choices=STATUS_CHOICES)
body = models.TextField()
drupal_id = models.IntegerField('drupal NID', unique=True, blank=True,
null=True, help_text='Node ID from the previous Drupal website (imported).')
objects = PostManager()
class Meta:
ordering = ['-pubdate']
class Image(ImageModel):
image = models.ImageField(upload_to=util.get_image_path)
post = models.ForeignKey(Post)
objects = ImageManager()
class Meta:
ordering = ['-post__pubdate', '-created']
| from django.core.urlresolvers import reverse
from django.db import models
from django.utils import timezone
from core.models import AuthorModel, ImageModel
from .managers import PostManager, ImageManager
import util
STATUS_CHOICES = [('d', 'Draft'), ('p', 'Published'), ('w', 'Withdrawn')]
class Post(AuthorModel):
    """A blog post with a publication workflow state and optional Drupal lineage."""

    # BUGFIX: pass the callable ``timezone.now`` rather than calling it.
    # ``default=timezone.now()`` is evaluated once at import time, which
    # would freeze the default publication date for every new Post.
    pubdate = models.DateTimeField('publication date', default=timezone.now)
    status = models.CharField(max_length=1, choices=STATUS_CHOICES)
    body = models.TextField()
    drupal_id = models.IntegerField('drupal NID', unique=True, blank=True,
        null=True, help_text='Node ID from the previous Drupal website (imported).')

    objects = PostManager()

    @property
    def thumbnail(self):
        """Return the first published image for this post, or None if none exist."""
        if self.image_set.published():
            return self.image_set.published()[:1].get()

    class Meta:
        ordering = ['-pubdate']


class Image(ImageModel):
    """An image attached to a Post; files are stored under util.get_image_path."""
    image = models.ImageField(upload_to=util.get_image_path)
    post = models.ForeignKey(Post)

    objects = ImageManager()

    class Meta:
        ordering = ['-post__pubdate', '-created']
| Add property for Post thumbnail | Add property for Post thumbnail
| Python | apache-2.0 | megaprojectske/megaprojects.co.ke,megaprojectske/megaprojects.co.ke,megaprojectske/megaprojects.co.ke | ---
+++
@@ -21,6 +21,11 @@
objects = PostManager()
+ @property
+ def thumbnail(self):
+ if self.image_set.published():
+ return self.image_set.published()[:1].get()
+
class Meta:
ordering = ['-pubdate']
|
3350460fdb4f0360b9251f30340ed9d1b6f97839 | shibboleth/urls.py | shibboleth/urls.py | from distutils.version import StrictVersion
import django
if StrictVersion(django.get_version()) >= StrictVersion('1.6'):
from django.conf.urls import patterns, url, include
else:
from django.conf.urls.defaults import *
from views import ShibbolethView, ShibbolethLogoutView, ShibbolethLoginView
urlpatterns = patterns('',
url(r'^login/$', ShibbolethLoginView.as_view(), name='login'),
url(r'^logout/$', ShibbolethLogoutView.as_view(), name='logout'),
url(r'^$', ShibbolethView.as_view(), name='info'),
) | from distutils.version import StrictVersion
import django
# Before Django 1.4 the URL helpers live in ``django.conf.urls.defaults``;
# from 1.4 onwards they are imported from ``django.conf.urls``.
if StrictVersion(django.get_version()) < StrictVersion('1.4'):
    from django.conf.urls.defaults import *
else:
    from django.conf.urls import patterns, url

from views import ShibbolethView, ShibbolethLogoutView, ShibbolethLoginView

# Route table for the Shibboleth views: login, logout, and an info page.
urlpatterns = patterns('',
    url(r'^login/$', ShibbolethLoginView.as_view(), name='login'),
    url(r'^logout/$', ShibbolethLogoutView.as_view(), name='logout'),
    url(r'^$', ShibbolethView.as_view(), name='info'),
)
| Python | mit | trevoriancox/django-shibboleth-remoteuser,CloudComputingCourse/django-shibboleth-remoteuser,KonstantinSchubert/django-shibboleth-adapter,ties/django-shibboleth-remoteuser,denisvlr/django-shibboleth-remoteuser,ties/django-shibboleth-remoteuser,uchicago-library/django-shibboleth-remoteuser,trevoriancox/django-shibboleth-remoteuser,Brown-University-Library/django-shibboleth-remoteuser,kennydude/django-shibboleth-remoteuser,UCL-RITS/django-shibboleth-remoteuser,UCL-RITS/django-shibboleth-remoteuser,denisvlr/django-shibboleth-remoteuser,KonstantinSchubert/django-shibboleth-adapter,kennydude/django-shibboleth-remoteuser,uchicago-library/django-shibboleth-remoteuser,abhishekshivanna/django-shibboleth-remoteuser,CloudComputingCourse/django-shibboleth-remoteuser,Brown-University-Library/django-shibboleth-remoteuser,abhishekshivanna/django-shibboleth-remoteuser | ---
+++
@@ -1,13 +1,13 @@
from distutils.version import StrictVersion
import django
-if StrictVersion(django.get_version()) >= StrictVersion('1.6'):
- from django.conf.urls import patterns, url, include
+if StrictVersion(django.get_version()) < StrictVersion('1.4'):
+ from django.conf.urls.defaults import *
else:
- from django.conf.urls.defaults import *
+ from django.conf.urls import patterns, url
from views import ShibbolethView, ShibbolethLogoutView, ShibbolethLoginView
-urlpatterns = patterns('',
+urlpatterns = patterns('',
url(r'^login/$', ShibbolethLoginView.as_view(), name='login'),
url(r'^logout/$', ShibbolethLogoutView.as_view(), name='logout'),
url(r'^$', ShibbolethView.as_view(), name='info'), |
baf3ef0ddcb7b59973750443f4c0a3732dd0f12a | spacy/cli/__init__.py | spacy/cli/__init__.py | from .download import download
from .info import info
from .link import link
from .package import package
from .train import train, train_config
from .model import model
from .convert import convert
| from .download import download
from .info import info
from .link import link
from .package import package
from .train import train
from .model import model
from .convert import convert
| Remove import of removed train_config script | Remove import of removed train_config script
| Python | mit | spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,aikramer2/spaCy,honnibal/spaCy,spacy-io/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy | ---
+++
@@ -2,6 +2,6 @@
from .info import info
from .link import link
from .package import package
-from .train import train, train_config
+from .train import train
from .model import model
from .convert import convert |
cf77d5b49416c33b05955a66450adda289a342d0 | main.py | main.py | from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = parser = argparse.ArgumentParser(description="Runs PyTriSearch googling utility.")
return parser.parse_args()
def generateItemsCollection(collectionName, username):
items = []
now = datetime.datetime.now()
for i in range(0,10):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
print(item.name)
items.append(item)
return Collection(collectionName, username, items)
def main():
arguments = generateArgumentsFromParser()
username = 'agarner'
collectionName = 'Items'
createCollection(username,collectionName)
itemCollection = generateItemsCollection(collectionName, username)
collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+username+'_'+CONST_COLLECTIONS_NAME+'/'+username+'_'+collectionName+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
if __name__ == '__main__':
main()
| from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
    """Parse the PyInventory command line.

    Returns:
        argparse.Namespace with ``username`` (from --user) and
        ``collectionName`` (from --type); both options are required.
    """
    # BUGFIX: drop the redundant double assignment (``parser = parser = ...``).
    parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
    parser.add_argument('--user', dest='username', required=True)
    parser.add_argument('--type', dest='collectionName', required=True)
    return parser.parse_args()
def generateItemsCollection(collectionName, username):
    """Build and return a Collection populated with ten placeholder items."""
    timestamp = datetime.datetime.now()
    generated = []
    for index in range(10):
        entry = ItemFactory.factory('item', [index, 'item' + str(index), timestamp, timestamp])
        print(entry.name)
        generated.append(entry)
    return Collection(collectionName, username, generated)
def main():
    """Entry point: create a collection for the given user and persist it.

    NOTE(review): the collection file is only written when it already exists
    on disk -- presumably createCollection creates it beforehand; confirm.
    """
    arguments = generateArgumentsFromParser()
    createCollection(arguments.username, arguments.collectionName)
    itemCollection = generateItemsCollection(arguments.collectionName, arguments.username)
    # Path layout: collections/<user>_collections/<user>_<type>_collection.dat
    collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+arguments.username+'_'+CONST_COLLECTIONS_NAME+'/'+arguments.username+'_'+arguments.collectionName+'_'+'collection.dat'
    if os.path.isfile(collectionsFilePath):
        collectionFile = open(collectionsFilePath, 'w')
        collectionFile.write(itemCollection.toJSON())
        collectionFile.close()

if __name__ == '__main__':
    main()
| Implement dynamic username and collection type | Implement dynamic username and collection type
| Python | apache-2.0 | AmosGarner/PyInventory | ---
+++
@@ -6,7 +6,9 @@
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
- parser = parser = argparse.ArgumentParser(description="Runs PyTriSearch googling utility.")
+ parser = parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
+ parser.add_argument('--user', dest='username', required=True)
+ parser.add_argument('--type', dest='collectionName', required=True)
return parser.parse_args()
def generateItemsCollection(collectionName, username):
@@ -21,19 +23,15 @@
def main():
arguments = generateArgumentsFromParser()
- username = 'agarner'
- collectionName = 'Items'
+ createCollection(arguments.username, arguments.collectionName)
+ itemCollection = generateItemsCollection(arguments.collectionName, arguments.username)
- createCollection(username,collectionName)
- itemCollection = generateItemsCollection(collectionName, username)
+ collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+arguments.username+'_'+CONST_COLLECTIONS_NAME+'/'+arguments.username+'_'+arguments.collectionName+'_'+'collection.dat'
- collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+username+'_'+CONST_COLLECTIONS_NAME+'/'+username+'_'+collectionName+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
-
-
if __name__ == '__main__':
main() |
6eaa58149bae1c5462fc79255656321145a4b45f | hashbrown/models.py | hashbrown/models.py | from django.db import models
from .compat import User
class Switch(models.Model):
label = models.CharField(max_length=200)
description = models.TextField(
help_text='Short description of what this switch is doing', blank=True)
globally_active = models.BooleanField(default=False)
users = models.ManyToManyField(
User, null=True, related_name='available_switches')
def __unicode__(self):
return self.label
| from django.db import models
from .compat import User
class Switch(models.Model):
    """A named feature flag that can be enabled globally or per user."""

    label = models.CharField(max_length=200)
    description = models.TextField(
        help_text='Short description of what this switch is doing', blank=True)
    # When True the switch is on for everyone, regardless of ``users``.
    globally_active = models.BooleanField(default=False)
    # NOTE(review): ``null=True`` has no effect on a ManyToManyField (the
    # relation lives in a join table) -- presumably kept for legacy reasons;
    # confirm before removing.
    users = models.ManyToManyField(
        User, null=True, related_name='available_switches', blank=True)

    def __unicode__(self):
        return self.label
| Allow no selected users when editing a switch. | Allow no selected users when editing a switch.
| Python | bsd-2-clause | potatolondon/django-hashbrown | ---
+++
@@ -11,7 +11,7 @@
globally_active = models.BooleanField(default=False)
users = models.ManyToManyField(
- User, null=True, related_name='available_switches')
+ User, null=True, related_name='available_switches', blank=True)
def __unicode__(self):
return self.label |
f4eac1d029b79c5dc5a6a07e696f9ea4c341342f | gittip/models/user.py | gittip/models/user.py | import uuid
from gittip.orm import db
from gittip.models.participant import Participant
class User(Participant):
"""Represent a website user.
Every current website user is also a participant, though if the user is
anonymous then the methods from Participant will fail with NoParticipantId.
"""
@classmethod
def from_session_token(cls, token):
user = User.query.filter_by(session_token=token).first()
if user and not user.is_suspicious:
user = user
else:
user = User()
return user
@classmethod
def from_id(cls, user_id):
user = User.query.filter_by(id=user_id).first()
if user and not user.is_suspicious:
user.session_token = uuid.uuid4().hex
db.session.add(user)
db.session.commit()
else:
user = User()
return user
@property
def ADMIN(self):
return self.id is not None and self.is_admin
@property
def ANON(self):
return self.id is None
def __unicode__(self):
return '<User: %s>' % getattr(self, 'id', 'Anonymous')
| import uuid
from gittip.orm import db
from gittip.models.participant import Participant
class User(Participant):
    """Represent a website user.

    Every current website user is also a participant, though if the user is
    anonymous then the methods from gittip.Participant will fail with
    NoParticipantId. The methods below therefore always hand back a User
    instance -- a blank (anonymous) one when the lookup fails.

    """

    @classmethod
    def from_session_token(cls, token):
        """Return the user for a session token, or an anonymous User.

        Suspicious accounts are deliberately treated as not found.
        """
        user = User.query.filter_by(session_token=token).first()
        if user and not user.is_suspicious:
            user = user
        else:
            user = User()
        return user

    @classmethod
    def from_id(cls, user_id):
        """Return the user with the given id, rotating their session token.

        A fresh token is generated and persisted on every successful lookup;
        unknown or suspicious ids yield an anonymous User instead.
        """
        user = User.query.filter_by(id=user_id).first()
        if user and not user.is_suspicious:
            user.session_token = uuid.uuid4().hex
            db.session.add(user)
            db.session.commit()
        else:
            user = User()
        return user

    @property
    def ADMIN(self):
        # Anonymous users (no id) are never admins.
        return self.id is not None and self.is_admin

    @property
    def ANON(self):
        # An anonymous User is one constructed without a database row.
        return self.id is None

    def __unicode__(self):
        return '<User: %s>' % getattr(self, 'id', 'Anonymous')
| Clarify comment re: Participant class | Clarify comment re: Participant class
This refers to the old participant class, which is going away.
| Python | cc0-1.0 | bountysource/www.gittip.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com,bountysource/www.gittip.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,mccolgst/www.gittip.com,gratipay/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,bountysource/www.gittip.com,studio666/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com | ---
+++
@@ -8,7 +8,9 @@
"""Represent a website user.
Every current website user is also a participant, though if the user is
- anonymous then the methods from Participant will fail with NoParticipantId.
+ anonymous then the methods from gittip.Participant will fail with
+ NoParticipantId. The methods
+
"""
@classmethod |
ae9b50b72d69a06c47cff0bc78d58346399b65d3 | hdbscan/__init__.py | hdbscan/__init__.py | from .hdbscan_ import HDBSCAN, hdbscan
from .robust_single_linkage_ import RobustSingleLinkage, robust_single_linkage
| from .hdbscan_ import HDBSCAN, hdbscan
from .robust_single_linkage_ import RobustSingleLinkage, robust_single_linkage
from .prediction import approximate_predict, membership_vector, all_points_membership_vectors
| Add prediction functions to imports in init | Add prediction functions to imports in init
| Python | bsd-3-clause | scikit-learn-contrib/hdbscan,lmcinnes/hdbscan,lmcinnes/hdbscan,scikit-learn-contrib/hdbscan | ---
+++
@@ -1,2 +1,3 @@
from .hdbscan_ import HDBSCAN, hdbscan
from .robust_single_linkage_ import RobustSingleLinkage, robust_single_linkage
+from .prediction import approximate_predict, membership_vector, all_points_membership_vectors |
f93888778e1710136d3ba5cc5365711a1c658ff6 | data/pipeline/run/30_exoplanets/exoread.py | data/pipeline/run/30_exoplanets/exoread.py | #!/usr/bin/env python
import json
import sys
import csv
from pymongo import MongoClient
if len(sys.argv) < 2:
print 'usage: python read.py filepath'
sys.exit(1)
reader = csv.DictReader(open(sys.argv[1]), delimiter=',', quotechar='"')
conn = MongoClient()
db = conn.asterank
coll = db.exo
coll.drop()
coll.ensure_index('kepoi_name', unique=True) # kepid isn't actually unique...
# TODO ensure koi_sma descending index
c = 0
for row in reader:
#row['pl_fulldes'] = '%s%s' % (row['pl_hostname'], row['pl_letter'])
for key, val in row.iteritems():
try:
val = float(val)
except ValueError:
pass
row[key] = val
coll.insert(row, continue_on_error=True)
c += 1
# put in db
print 'Added', c, 'candidate exoplanets'
print 'Done.'
| #!/usr/bin/env python
import json
import sys
import csv
from pymongo import MongoClient
# Bail out early when the caller forgot to pass the CSV file to import.
if len(sys.argv) < 2:
    print 'usage: python read.py filepath'
    sys.exit(1)

reader = csv.DictReader(open(sys.argv[1]), delimiter=',', quotechar='"')

conn = MongoClient()
db = conn.asterank
coll = db.exo
# Rebuild the collection from scratch on every import run.
coll.drop()
coll.ensure_index('kepoi_name', unique=True) # kepid isn't actually unique...

c = 0
for row in reader:
    # Best effort: store numeric-looking fields as floats; keep everything
    # else as the original string.
    for key, val in row.iteritems():
        try:
            val = float(val)
        except ValueError:
            pass
        row[key] = val
    coll.insert(row, continue_on_error=True)
    c += 1

print 'Added', c, 'candidate exoplanets'
print 'Done.'
| Clean things up a little | Clean things up a little | Python | mit | typpo/asterank,typpo/asterank,typpo/asterank,typpo/asterank | ---
+++
@@ -15,11 +15,9 @@
coll = db.exo
coll.drop()
coll.ensure_index('kepoi_name', unique=True) # kepid isn't actually unique...
-# TODO ensure koi_sma descending index
c = 0
for row in reader:
- #row['pl_fulldes'] = '%s%s' % (row['pl_hostname'], row['pl_letter'])
for key, val in row.iteritems():
try:
val = float(val)
@@ -29,8 +27,5 @@
coll.insert(row, continue_on_error=True)
c += 1
-
-# put in db
-
print 'Added', c, 'candidate exoplanets'
print 'Done.' |
c600d1e1ad3cef69f6028afd64e14a04c747e1c6 | tests/test_install.py | tests/test_install.py | import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
py_version = ['2.6.1', '--type', 'pypy']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['2.6.1', '--type', 'pypy']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
| import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
    """Stack the platform/CI skip markers that Pythonz-based tests need."""
    decorated = connection_required(f)
    decorated = pytest.mark.skipif(os.environ.get('NIX'),
                                   reason='Pythonz unavailable in Nix')(decorated)
    decorated = pytest.mark.skipif(sys.platform == 'cygwin',
                                   reason='Pythonz unavailable in Cygwin')(decorated)
    return skip_windows(reason='Pythonz unavailable in Windows')(decorated)
@skip_marker
def test_install():
py_version = ['3.5.1']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['3.5.1']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
| Replace version of Python to install in test_{un,}install test | Replace version of Python to install in test_{un,}install test
PyPy 2.6.1's download link is not working anymore.
| Python | mit | berdario/pew,berdario/pew | ---
+++
@@ -16,14 +16,14 @@
@skip_marker
def test_install():
- py_version = ['2.6.1', '--type', 'pypy']
+ py_version = ['3.5.1']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
- py_version = ['2.6.1', '--type', 'pypy']
+ py_version = ['3.5.1']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0 |
5038c49c1597ef6182c429dd63b34045a69de945 | tests/test_version.py | tests/test_version.py | # coding=utf-8
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import re
version_regex = re.compile(r'^\d+\.\d+')
import lasio.las_version
def test_verify_default_vcs_tool():
result = lasio.las_version._get_vcs_version()
if 'GITHUB_WORKFLOW' in os.environ:
assert result == ""
else:
assert version_regex.match(result)
def test_non_existent_vcs_tool():
version_cmd = ["gt", "describe", "--tags", "--match", "v*"]
result = lasio.las_version._get_vcs_version(version_cmd)
assert result == ""
def test_explicit_existent_vcs_tool():
version_cmd = ["git", "describe", "--tags", "--match", "v*"]
result = lasio.las_version._get_vcs_version(version_cmd)
if 'GITHUB_WORKFLOW' in os.environ:
assert result == ""
else:
assert version_regex.match(result)
| # coding=utf-8
import os, sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import re
version_regex = re.compile(r'^\d+\.\d+')
import lasio.las_version
def test_non_existent_vcs_tool():
version_cmd = ["gt", "describe", "--tags", "--match", "v*"]
result = lasio.las_version._get_vcs_version(version_cmd)
assert result == ""
# ------------------------------------------------------------------------------
# Most of the time GITHUB_WORKFLOW will install the lasio repo with the
# '--no-tag' option. In those cases tags won't be available and the resulting
# version is expected to be an empty string.
#
# Occasionally a release version will be pushed and in those cases
# GITHUB_WORKFLOW will specifically make the refs/tag/<pushed-tag> available
# and the resulting version is expected to be a regular version string.
#
# So we check for both of those cases when testing in GITHUB_WORKFLOW
# environment.
# ------------------------------------------------------------------------------
def test_verify_default_vcs_tool():
    """The default VCS command yields a version string (or "" in CI)."""
    result = lasio.las_version._get_vcs_version()
    running_in_ci = 'GITHUB_WORKFLOW' in os.environ
    if running_in_ci:
        assert result == "" or version_regex.match(result)
    else:
        assert version_regex.match(result)
def test_explicit_existent_vcs_tool():
    """Passing the git command explicitly yields a version (or "" in CI)."""
    cmd = ["git", "describe", "--tags", "--match", "v*"]
    result = lasio.las_version._get_vcs_version(cmd)
    running_in_ci = 'GITHUB_WORKFLOW' in os.environ
    if running_in_ci:
        assert result == "" or version_regex.match(result)
    else:
        assert version_regex.match(result)
| Add check for pushed tag version to version tests | Add check for pushed tag version to version tests
When getting Lasio's version in GITHUB_WORK flow environment, test for
both a '--no-tag' empty string version and a pushed release tag string
version.
| Python | mit | kinverarity1/lasio,kwinkunks/lasio,kinverarity1/las-reader | ---
+++
@@ -8,24 +8,35 @@
import lasio.las_version
-
-def test_verify_default_vcs_tool():
- result = lasio.las_version._get_vcs_version()
- if 'GITHUB_WORKFLOW' in os.environ:
- assert result == ""
- else:
- assert version_regex.match(result)
-
def test_non_existent_vcs_tool():
version_cmd = ["gt", "describe", "--tags", "--match", "v*"]
result = lasio.las_version._get_vcs_version(version_cmd)
assert result == ""
+
+# ------------------------------------------------------------------------------
+# Most of the time GITHUB_WORKFLOW will install the lasio repo with the
+# '--no-tag' option. In those cases tags won't be available and the resulting
+# version is expected to be an empty string.
+#
+# Occationally a release version will be pushed and in those cases
+# GITHUB_WORKFLOW will specifically make the refs/tag/<pushed-tag> available
+# and the resulting version is expected to be a regular version string.
+#
+# So we check for both of those cases when testing in GITHUB_WORKFLOW
+# environment.
+# ------------------------------------------------------------------------------
+def test_verify_default_vcs_tool():
+ result = lasio.las_version._get_vcs_version()
+ if 'GITHUB_WORKFLOW' in os.environ:
+ assert result == "" or version_regex.match(result)
+ else:
+ assert version_regex.match(result)
def test_explicit_existent_vcs_tool():
version_cmd = ["git", "describe", "--tags", "--match", "v*"]
result = lasio.las_version._get_vcs_version(version_cmd)
if 'GITHUB_WORKFLOW' in os.environ:
- assert result == ""
+ assert result == "" or version_regex.match(result)
else:
- assert version_regex.match(result)
+ assert version_regex.match(result) |
e5351bba6cdf7b76da895afda80c18309c7f90eb | tests/test_config.py | tests/test_config.py | from yacron import config
def test_mergedicts():
assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
def test_mergedicts_nested():
assert dict(config.mergedicts(
{"a": {'x': 1, 'y': 2, 'z': 3}},
{'a': {'y': 10}, "b": 2})) == \
{"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2}
def test_mergedicts_right_none():
assert dict(config.mergedicts(
{"a": {'x': 1}},
{"a": None, "b": 2})) == \
{"a": {'x': 1}, "b": 2}
def test_mergedicts_lists():
assert dict(config.mergedicts(
{"env": [{'key': 'FOO'}]},
{"env": [{'key': 'BAR'}]})) \
== \
{"env": [{'key': 'FOO'}, {'key': 'BAR'}]}
| from yacron import config
def test_mergedicts():
assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
def test_mergedicts_nested():
assert (dict(config.mergedicts(
{"a": {'x': 1, 'y': 2, 'z': 3}},
{'a': {'y': 10}, "b": 2}
)) == {"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2})
def test_mergedicts_right_none():
assert (dict(config.mergedicts(
{"a": {'x': 1}},
{"a": None, "b": 2}
)) == {"a": {'x': 1}, "b": 2})
def test_mergedicts_lists():
assert (dict(config.mergedicts(
{"env": [{'key': 'FOO'}]},
{"env": [{'key': 'BAR'}]}
)) == {"env": [{'key': 'FOO'}, {'key': 'BAR'}]})
| Replace tabs with spaces, use parens to get rid of backslashes | Replace tabs with spaces, use parens to get rid of backslashes
| Python | mit | gjcarneiro/yacron | ---
+++
@@ -2,26 +2,25 @@
def test_mergedicts():
- assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
+ assert dict(config.mergedicts({"a": 1}, {"b": 2})) == {"a": 1, "b": 2}
def test_mergedicts_nested():
- assert dict(config.mergedicts(
- {"a": {'x': 1, 'y': 2, 'z': 3}},
- {'a': {'y': 10}, "b": 2})) == \
- {"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2}
+ assert (dict(config.mergedicts(
+ {"a": {'x': 1, 'y': 2, 'z': 3}},
+ {'a': {'y': 10}, "b": 2}
+ )) == {"a": {'x': 1, 'y': 10, 'z': 3}, "b": 2})
def test_mergedicts_right_none():
- assert dict(config.mergedicts(
- {"a": {'x': 1}},
- {"a": None, "b": 2})) == \
- {"a": {'x': 1}, "b": 2}
+ assert (dict(config.mergedicts(
+ {"a": {'x': 1}},
+ {"a": None, "b": 2}
+ )) == {"a": {'x': 1}, "b": 2})
def test_mergedicts_lists():
- assert dict(config.mergedicts(
- {"env": [{'key': 'FOO'}]},
- {"env": [{'key': 'BAR'}]})) \
- == \
- {"env": [{'key': 'FOO'}, {'key': 'BAR'}]}
+ assert (dict(config.mergedicts(
+ {"env": [{'key': 'FOO'}]},
+ {"env": [{'key': 'BAR'}]}
+ )) == {"env": [{'key': 'FOO'}, {'key': 'BAR'}]}) |
eca2199c90fa169acef8672458df0df3e6d65fad | tests/test_device.py | tests/test_device.py | def test_test():
assert True
| import pytest
from xbee_helper import device, exceptions, ZigBee
def test_raise_if_error_no_status():
"""
Should return None without raising if there's no "status" key in frame.
"""
assert device.raise_if_error({}) is None
def test_raise_if_error_zero():
"""
Should return None without raising if "status" is set to b"\x00".
"""
assert device.raise_if_error(dict(status=b"\x00")) is None
def test_raise_if_error_unknown():
"""
Should raise ZigBeeUnknownError if "status" is set to b"\x01".
"""
with pytest.raises(exceptions.ZigBeeUnknownError):
device.raise_if_error(dict(status=b"\x01"))
def test_raise_if_error_invalid_cmd():
"""
Should raise ZigBeeInvalidCommand if "status" is set to b"\x02".
"""
with pytest.raises(exceptions.ZigBeeInvalidCommand):
device.raise_if_error(dict(status=b"\x02"))
def test_raise_if_error_invalid_param():
"""
Should raise ZigBeeInvalidParameter if "status" is set to b"\x03".
"""
with pytest.raises(exceptions.ZigBeeInvalidParameter):
device.raise_if_error(dict(status=b"\x03"))
def test_raise_if_error_tx_failure():
"""
Should raise ZigBeeTxFailure if "status" is set to b"\x04".
"""
with pytest.raises(exceptions.ZigBeeTxFailure):
device.raise_if_error(dict(status=b"\x04"))
def test_raise_if_error_unknown_status():
"""
Should raise ZigBeeUnknownStatus if "status" is unrecognised.
"""
with pytest.raises(exceptions.ZigBeeUnknownStatus):
device.raise_if_error(dict(status=b"\xFF"))
| Add tests for raise_if_error function | Add tests for raise_if_error function
| Python | mit | flyte/xbee-helper | ---
+++
@@ -1,2 +1,57 @@
-def test_test():
- assert True
+import pytest
+
+from xbee_helper import device, exceptions, ZigBee
+
+
+def test_raise_if_error_no_status():
+ """
+ Should return None without raising if there's no "status" key in frame.
+ """
+ assert device.raise_if_error({}) is None
+
+
+def test_raise_if_error_zero():
+ """
+ Should return None without raising if "status" is set to b"\x00".
+ """
+ assert device.raise_if_error(dict(status=b"\x00")) is None
+
+
+def test_raise_if_error_unknown():
+ """
+ Should raise ZigBeeUnknownError if "status" is set to b"\x01".
+ """
+ with pytest.raises(exceptions.ZigBeeUnknownError):
+ device.raise_if_error(dict(status=b"\x01"))
+
+
+def test_raise_if_error_invalid_cmd():
+ """
+ Should raise ZigBeeInvalidCommand if "status" is set to b"\x02".
+ """
+ with pytest.raises(exceptions.ZigBeeInvalidCommand):
+ device.raise_if_error(dict(status=b"\x02"))
+
+
+def test_raise_if_error_invalid_param():
+ """
+ Should raise ZigBeeInvalidParameter if "status" is set to b"\x03".
+ """
+ with pytest.raises(exceptions.ZigBeeInvalidParameter):
+ device.raise_if_error(dict(status=b"\x03"))
+
+
+def test_raise_if_error_tx_failure():
+ """
+ Should raise ZigBeeTxFailure if "status" is set to b"\x04".
+ """
+ with pytest.raises(exceptions.ZigBeeTxFailure):
+ device.raise_if_error(dict(status=b"\x04"))
+
+
+def test_raise_if_error_unknown_status():
+ """
+ Should raise ZigBeeUnknownStatus if "status" is unrecognised.
+ """
+ with pytest.raises(exceptions.ZigBeeUnknownStatus):
+ device.raise_if_error(dict(status=b"\xFF")) |
fd909f383ab8a930c8a858144e0566075821f019 | tests/test_search.py | tests/test_search.py | from sharepa.search import ShareSearch
from sharepa.search import basic_search
import elasticsearch_dsl
import types
def test_basic_search():
results = basic_search.execute()
assert results.hits
assert results.aggregations
def test_no_title_search():
my_search = ShareSearch()
my_search = my_search.query(
'query_string',
query='NOT title:*',
analyze_wildcard=True
)
results = my_search.execute()
for result in results:
assert not result.get('title')
def test_execute():
my_search = ShareSearch()
result = my_search.execute()
assert isinstance(result, elasticsearch_dsl.result.Response)
def test_count():
count = basic_search.count()
assert isinstance(count, int)
def test_query():
assert isinstance(basic_search._query(basic_search.to_dict()), dict)
def test_scan():
my_search = ShareSearch()
my_search = my_search.query(
'query_string',
query='science AND cows AND milk'
)
scan = my_search.scan()
scan_list = [item for item in scan]
assert isinstance(scan, types.GeneratorType)
assert scan_list
| from sharepa.search import ShareSearch
from sharepa.search import basic_search
import vcr
import types
import elasticsearch_dsl
def test_basic_search():
results = basic_search.execute()
assert results.hits
assert results.aggregations
def test_no_title_search():
my_search = ShareSearch()
my_search = my_search.query(
'query_string',
query='NOT title:*',
analyze_wildcard=True
)
results = my_search.execute()
for result in results:
assert not result.get('title')
@vcr.use_cassette('tests/vcr/simple_execute.yaml')
def test_execute():
my_search = ShareSearch()
result = my_search.execute()
first_result = result.hits[0].to_dict()
assert len(result.hits) == 10
assert result.to_dict().keys() == ['hits', '_shards', 'took', 'timed_out', 'time']
assert isinstance(result, elasticsearch_dsl.result.Response)
assert first_result['title'] == 'Avian community structure and incidence of human West Nile infection'
def test_count():
count = basic_search.count()
assert isinstance(count, int)
def test_query():
assert isinstance(basic_search._query(basic_search.to_dict()), dict)
@vcr.use_cassette('tests/vcr/scan.yaml')
def test_scan():
my_search = ShareSearch()
my_search = my_search.query(
'query_string',
query='squared AND circle'
)
scan = my_search.scan()
scan_list = [item for item in scan]
assert len(scan_list) == 3
assert scan_list[0].title == '<p>The ellipsoids in the figure are isolines of constant density of bivariate Gaussian distributions.</p>'
| Add vcr to scan test | Add vcr to scan test
| Python | mit | fabianvf/sharepa,erinspace/sharepa,samanehsan/sharepa,CenterForOpenScience/sharepa | ---
+++
@@ -1,8 +1,9 @@
from sharepa.search import ShareSearch
from sharepa.search import basic_search
+import vcr
+import types
import elasticsearch_dsl
-import types
def test_basic_search():
@@ -24,11 +25,16 @@
assert not result.get('title')
+@vcr.use_cassette('tests/vcr/simple_execute.yaml')
def test_execute():
my_search = ShareSearch()
+ result = my_search.execute()
+ first_result = result.hits[0].to_dict()
- result = my_search.execute()
+ assert len(result.hits) == 10
+ assert result.to_dict().keys() == ['hits', '_shards', 'took', 'timed_out', 'time']
assert isinstance(result, elasticsearch_dsl.result.Response)
+ assert first_result['title'] == 'Avian community structure and incidence of human West Nile infection'
def test_count():
@@ -40,15 +46,14 @@
assert isinstance(basic_search._query(basic_search.to_dict()), dict)
+@vcr.use_cassette('tests/vcr/scan.yaml')
def test_scan():
my_search = ShareSearch()
my_search = my_search.query(
'query_string',
- query='science AND cows AND milk'
+ query='squared AND circle'
)
-
scan = my_search.scan()
scan_list = [item for item in scan]
-
- assert isinstance(scan, types.GeneratorType)
- assert scan_list
+ assert len(scan_list) == 3
+ assert scan_list[0].title == '<p>The ellipsoids in the figure are isolines of constant density of bivariate Gaussian distributions.</p>' |
1efb717cec51ce5d2aa67d668528cb0fcdde94e8 | scuevals_api/auth/decorators.py | scuevals_api/auth/decorators.py | from functools import wraps
from flask_jwt_extended import get_jwt_identity, jwt_required, current_user
from werkzeug.exceptions import Unauthorized
from scuevals_api.models import User
def optional_arg_decorator(fn):
def wrapped_decorator(*args):
if len(args) == 1 and callable(args[0]):
return fn(args[0])
else:
def real_decorator(decoratee):
return fn(decoratee, *args)
return real_decorator
return wrapped_decorator
@optional_arg_decorator
def auth_required(fn, permission=None):
"""
Decorating a view with this ensures that the requester provided a JWT
and that the requester has permission to access the view.
"""
@wraps(fn)
def wrapper(*args, **kwargs):
jwt_required(lambda: None)()
identity = get_jwt_identity()
if identity['type'] == User.Normal:
# fail if the user is still suspended
if current_user.suspended():
raise Unauthorized('user is suspended')
if identity['type'] == User.Student:
# make sure the read access is synced up
current_user.check_read_access()
# verify that the user has the correct permissions for this view
if permission is not None and permission not in current_user.permissions_list:
raise Unauthorized()
return fn(*args, **kwargs)
return wrapper
| from functools import wraps
from flask_jwt_extended import get_jwt_identity, jwt_required, current_user
from werkzeug.exceptions import Unauthorized
from scuevals_api.models import User
def optional_arg_decorator(fn):
def wrapped_decorator(*args):
if len(args) == 1 and callable(args[0]):
return fn(args[0])
else:
def real_decorator(decoratee):
return fn(decoratee, *args)
return real_decorator
return wrapped_decorator
@optional_arg_decorator
def auth_required(fn, *permissions):
"""
Decorating a view with this ensures that the requester provided a JWT
and that the requester has any of the permissions needed to access the view.
"""
@wraps(fn)
def wrapper(*args, **kwargs):
jwt_required(lambda: None)()
identity = get_jwt_identity()
if identity['type'] == User.Normal:
# fail if the user is still suspended
if current_user.suspended():
raise Unauthorized('user is suspended')
if identity['type'] == User.Student:
# make sure the read access is synced up
current_user.check_read_access()
# verify that the user has the correct permissions for this view
if permissions and len(set(permissions).intersection(current_user.permissions_list)) == 0:
raise Unauthorized()
return fn(*args, **kwargs)
return wrapper
| Allow multiple permissions for an endpoint | Allow multiple permissions for an endpoint
| Python | agpl-3.0 | SCUEvals/scuevals-api,SCUEvals/scuevals-api | ---
+++
@@ -20,10 +20,10 @@
@optional_arg_decorator
-def auth_required(fn, permission=None):
+def auth_required(fn, *permissions):
"""
Decorating a view with this ensures that the requester provided a JWT
- and that the requester has permission to access the view.
+ and that the requester has any of the permissions needed to access the view.
"""
@wraps(fn)
def wrapper(*args, **kwargs):
@@ -42,7 +42,7 @@
current_user.check_read_access()
# verify that the user has the correct permissions for this view
- if permission is not None and permission not in current_user.permissions_list:
+ if permissions and len(set(permissions).intersection(current_user.permissions_list)) == 0:
raise Unauthorized()
return fn(*args, **kwargs) |
4fa9cff24867d3288d4f46c669151e823e934df0 | nmea.py | nmea.py | import re
def gpgga_get_position(gpgga_sentence):
gps = re.search("\$GPGGA,,([0-9]+\.[0-9]+),([NS]),([0-9]+\.[0-9]+),([WE]),,,,([0-9]+),M,+\*(\w+)", gpgga_sentence)
position = {}
position['lat'] = gps.group(1)
position['lat_coord'] = gps.group(2)
position['long'] = gps.group(3)
position['long_coord'] = gps.group(4)
position['height'] = str(int(int(gps.group(5)) * 3.28084)).zfill(6)
return position
if __name__ == "__main__":
print gpgga_get_position("$GPGGA,,3434.28,S,05829.35,W,,,,176,M,,,,,*39")
| import re
def gpgga_get_position(gpgga_sentence):
sentence = gpgga_sentence.split(",")
position = {}
position['lat'] = sentence[2]
position['lat_coord'] = sentence[3]
position['lon'] = sentence[4]
position['lon_coord'] = sentence[5]
position['height'] = str(int(float(sentence[11]) * 3.28084)).zfill(6)
return position
if __name__ == "__main__":
print gpgga_get_position("$GPGGA,142353.00,3436.93,S,05822.72,W,1,06,2.4,55.5,M,13.2,M,,*54")
| Simplify how we get lat and long from GPGGA sentence. | Simplify how we get lat and long from GPGGA sentence.
| Python | mit | elielsardanons/dstar_sniffer,elielsardanons/dstar_sniffer | ---
+++
@@ -1,15 +1,15 @@
import re
def gpgga_get_position(gpgga_sentence):
- gps = re.search("\$GPGGA,,([0-9]+\.[0-9]+),([NS]),([0-9]+\.[0-9]+),([WE]),,,,([0-9]+),M,+\*(\w+)", gpgga_sentence)
+ sentence = gpgga_sentence.split(",")
position = {}
- position['lat'] = gps.group(1)
- position['lat_coord'] = gps.group(2)
- position['long'] = gps.group(3)
- position['long_coord'] = gps.group(4)
- position['height'] = str(int(int(gps.group(5)) * 3.28084)).zfill(6)
+ position['lat'] = sentence[2]
+ position['lat_coord'] = sentence[3]
+ position['lon'] = sentence[4]
+ position['lon_coord'] = sentence[5]
+ position['height'] = str(int(float(sentence[11]) * 3.28084)).zfill(6)
return position
if __name__ == "__main__":
- print gpgga_get_position("$GPGGA,,3434.28,S,05829.35,W,,,,176,M,,,,,*39")
+ print gpgga_get_position("$GPGGA,142353.00,3436.93,S,05822.72,W,1,06,2.4,55.5,M,13.2,M,,*54")
|
c0312176d9a53eeb6ffdc7e44e3a5c240072b33c | trac/upgrades/db20.py | trac/upgrades/db20.py | from trac.db import Table, Column, Index, DatabaseManager
from trac.core import TracError
from trac.versioncontrol.cache import CACHE_YOUNGEST_REV
def do_upgrade(env, ver, cursor):
"""Modify the repository cache scheme (if needed)
Now we use the 'youngest_rev' entry in the system table
to explicitly store the youngest rev in the cache.
"""
db = env.get_db_cnx()
try:
repos = env.get_repository()
youngest = repos.get_youngest_rev_in_cache(db) or ''
# deleting first, for the 0.11dev and 0.10.4dev users
cursor.execute("DELETE FROM system WHERE name=%s",
(CACHE_YOUNGEST_REV,))
cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
(CACHE_YOUNGEST_REV, youngest))
except TracError: # no repository available
pass
| from trac.db import Table, Column, Index, DatabaseManager
from trac.core import TracError
from trac.versioncontrol.cache import CACHE_YOUNGEST_REV
def do_upgrade(env, ver, cursor):
"""Modify the repository cache scheme (if needed)
Now we use the 'youngest_rev' entry in the system table
to explicitly store the youngest rev in the cache.
"""
db = env.get_db_cnx()
try:
repos = env.get_repository()
youngest = repos.get_youngest_rev_in_cache(db) or ''
except TracError: # no repository available
youngest = ''
# deleting first, for the 0.11dev and 0.10.4dev users
cursor.execute("DELETE FROM system WHERE name=%s",
(CACHE_YOUNGEST_REV,))
cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
(CACHE_YOUNGEST_REV, youngest))
| Make db upgrade step 20 more robust. | Make db upgrade step 20 more robust.
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@5815 af82e41b-90c4-0310-8c96-b1721e28e2e2
| Python | bsd-3-clause | rbaumg/trac,rbaumg/trac,rbaumg/trac,rbaumg/trac | ---
+++
@@ -12,11 +12,11 @@
try:
repos = env.get_repository()
youngest = repos.get_youngest_rev_in_cache(db) or ''
- # deleting first, for the 0.11dev and 0.10.4dev users
- cursor.execute("DELETE FROM system WHERE name=%s",
- (CACHE_YOUNGEST_REV,))
- cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
- (CACHE_YOUNGEST_REV, youngest))
except TracError: # no repository available
- pass
+ youngest = ''
+ # deleting first, for the 0.11dev and 0.10.4dev users
+ cursor.execute("DELETE FROM system WHERE name=%s",
+ (CACHE_YOUNGEST_REV,))
+ cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
+ (CACHE_YOUNGEST_REV, youngest))
|
72302898ba5a6fc74dcedfc6f2049f4a0c1299ac | src/conftest.py | src/conftest.py | import pytest
def pytest_addoption(parser):
parser.addoption("--filter-project-name",
action="store",
default=None,
help="pass a project name to filter a test file to run only tests related to it")
@pytest.fixture
def filter_project_name(request):
return request.config.getoption('--filter-project-name')
| import logging
import pytest
import buildercore.config
LOG = logging.getLogger("conftest")
def pytest_addoption(parser):
parser.addoption("--filter-project-name",
action="store",
default=None,
help="pass a project name to filter a test file to run only tests related to it")
@pytest.fixture
def filter_project_name(request):
return request.config.getoption('--filter-project-name')
def pytest_runtest_setup(item):
LOG.info("Setting up %s::%s", item.cls, item.name)
def pytest_runtest_teardown(item, nextitem):
LOG.info("Tearing down up %s::%s", item.cls, item.name)
| Add logs for test start and end | Add logs for test start and end
| Python | mit | elifesciences/builder,elifesciences/builder | ---
+++
@@ -1,4 +1,8 @@
+import logging
import pytest
+import buildercore.config
+
+LOG = logging.getLogger("conftest")
def pytest_addoption(parser):
parser.addoption("--filter-project-name",
@@ -9,3 +13,9 @@
@pytest.fixture
def filter_project_name(request):
return request.config.getoption('--filter-project-name')
+
+def pytest_runtest_setup(item):
+ LOG.info("Setting up %s::%s", item.cls, item.name)
+
+def pytest_runtest_teardown(item, nextitem):
+ LOG.info("Tearing down up %s::%s", item.cls, item.name) |
74f0cfae2e56157c4bd3e5dde6d68cd8e40cf412 | feincms/module/page/extensions/symlinks.py | feincms/module/page/extensions/symlinks.py | """
This introduces a new page type, which has no content of its own but inherits
all content from the linked page.
"""
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms._internal import monkeypatch_property
def register(cls, admin_cls):
cls.add_to_class('symlinked_page', models.ForeignKey('self', blank=True, null=True,
related_name='%(app_label)s_%(class)s_symlinks',
verbose_name=_('symlinked page'),
help_text=_('All content is inherited from this page if given.')))
@monkeypatch_property(cls)
def content(self):
if not hasattr(self, '_content_proxy'):
if self.symlinked_page:
self._content_proxy = self.content_proxy_class(self.symlinked_page)
else:
self._content_proxy = self.content_proxy_class(self)
return self._content_proxy
admin_cls.raw_id_fields.append('symlinked_page')
admin_cls.fieldsets.append((_('Symlinked page'), {
'fields': ('symlinked_page',),
'classes': ('collapse',),
}))
| """
This introduces a new page type, which has no content of its own but inherits
all content from the linked page.
"""
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms._internal import monkeypatch_property
def register(cls, admin_cls):
cls.add_to_class('symlinked_page', models.ForeignKey('self', blank=True, null=True,
related_name='%(app_label)s_%(class)s_symlinks',
verbose_name=_('symlinked page'),
help_text=_('All content is inherited from this page if given.')))
@monkeypatch_property(cls)
def content(self):
if not hasattr(self, '_content_proxy'):
if self.symlinked_page:
self._content_proxy = self.content_proxy_class(self.symlinked_page)
else:
self._content_proxy = self.content_proxy_class(self)
return self._content_proxy
admin_cls.raw_id_fields.append('symlinked_page')
| Move symlinked page field to "Other options" | Move symlinked page field to "Other options"
| Python | bsd-3-clause | feincms/feincms,nickburlett/feincms,michaelkuty/feincms,feincms/feincms,nickburlett/feincms,matthiask/feincms2-content,matthiask/feincms2-content,joshuajonah/feincms,matthiask/django-content-editor,pjdelport/feincms,joshuajonah/feincms,matthiask/feincms2-content,michaelkuty/feincms,michaelkuty/feincms,nickburlett/feincms,pjdelport/feincms,mjl/feincms,mjl/feincms,matthiask/django-content-editor,matthiask/django-content-editor,joshuajonah/feincms,matthiask/django-content-editor,nickburlett/feincms,mjl/feincms,feincms/feincms,pjdelport/feincms,joshuajonah/feincms,michaelkuty/feincms | ---
+++
@@ -26,8 +26,3 @@
return self._content_proxy
admin_cls.raw_id_fields.append('symlinked_page')
-
- admin_cls.fieldsets.append((_('Symlinked page'), {
- 'fields': ('symlinked_page',),
- 'classes': ('collapse',),
- })) |
697ffec14e11e3558c8ebd33637aeebd7119a772 | mltsp/__init__.py | mltsp/__init__.py | """Machine Learning Time-Series Platform (MLTSP)
See http://mltsp.io for more information.
"""
__version__ = '0.3dev'
def install():
"""Install MLTSP config file in ~/.config/mltsp/mltsp.yaml.
"""
import os
import shutil
cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml')
cfg_dir = os.path.dirname(cfg)
if os.path.exists(cfg):
print('Existing configuration at {} -- not overwriting.'.format(cfg))
return
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
shutil.copyfile(os.path.join(os.path.dirname(__file__),
'mltsp.yaml.example'),
cfg)
print('Installed {}'.format(cfg))
print('Please customize this file with authentication tokens, etc.')
| """Machine Learning Time-Series Platform (MLTSP)
See http://mltsp.io for more information.
"""
__version__ = '0.3dev'
def install():
"""Install MLTSP config file in ~/.config/mltsp/mltsp.yaml.
"""
import os
import shutil
from distutils.dir_util import copy_tree
data_src = os.path.join(os.path.dirname(os.path.dirname(__file__)),
"data")
data_dst = os.path.expanduser('~/.local/mltsp/')
copy_tree(data_src, data_dst, update=1)
print("Created data directory at {} and copied sample data.".format(
os.path.expanduser('~/.local/mltsp/')))
cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml')
cfg_dir = os.path.dirname(cfg)
if os.path.exists(cfg):
print('Existing configuration at {} -- not overwriting.'.format(cfg))
return
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
shutil.copyfile(os.path.join(os.path.dirname(os.path.dirname(__file__)),
'mltsp.yaml.example'),
cfg)
print('Installed {}'.format(cfg))
print('Please customize this file with authentication tokens, etc.')
| Copy data directory to ~/.local/mltsp during install; fix path to mltsp.yaml.example | Copy data directory to ~/.local/mltsp during install; fix path to mltsp.yaml.example
| Python | bsd-3-clause | mltsp/mltsp,bnaul/mltsp,acrellin/mltsp,bnaul/mltsp,bnaul/mltsp,acrellin/mltsp,acrellin/mltsp,bnaul/mltsp,mltsp/mltsp,mltsp/mltsp,mltsp/mltsp,bnaul/mltsp,acrellin/mltsp,bnaul/mltsp,acrellin/mltsp,mltsp/mltsp,acrellin/mltsp,mltsp/mltsp | ---
+++
@@ -12,6 +12,14 @@
"""
import os
import shutil
+ from distutils.dir_util import copy_tree
+
+ data_src = os.path.join(os.path.dirname(os.path.dirname(__file__)),
+ "data")
+ data_dst = os.path.expanduser('~/.local/mltsp/')
+ copy_tree(data_src, data_dst, update=1)
+ print("Created data directory at {} and copied sample data.".format(
+ os.path.expanduser('~/.local/mltsp/')))
cfg = os.path.expanduser('~/.config/mltsp/mltsp.yaml')
cfg_dir = os.path.dirname(cfg)
@@ -23,7 +31,7 @@
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
- shutil.copyfile(os.path.join(os.path.dirname(__file__),
+ shutil.copyfile(os.path.join(os.path.dirname(os.path.dirname(__file__)),
'mltsp.yaml.example'),
cfg)
|
8a0fc8a9241a7d090f801101cd5324d15e7ae990 | heutagogy/views.py | heutagogy/views.py | from heutagogy import app
import heutagogy.persistence
from flask import request, jsonify, Response
import json
import datetime
import sqlite3
heutagogy.persistence.initialize()
@app.route('/')
def index():
return 'Hello, world!'
@app.route('/api/v1/bookmarks', methods=['POST'])
def bookmarks_post():
r = request.get_json()
bookmark = dict()
try:
bookmark['url'] = r['url']
except:
return jsonify(message='url field is mandatory'), 400
bookmark['title'] = r['title'] if 'title' in r else bookmark['url']
bookmark['timestamp'] = r['timestamp'] if 'timestamp' in r else datetime.datetime.utcnow().isoformat(' ')
result = heutagogy.persistence.save_bookmark(bookmark)
return jsonify(**result), 201
@app.route('/api/v1/bookmarks', methods=['GET'])
def bookmarks_get():
result = heutagogy.persistence.get_bookmarks()
return Response(json.dumps(result), mimetype='application/json')
| from heutagogy import app
import heutagogy.persistence
from flask import request, jsonify, Response
import json
import datetime
import sqlite3
heutagogy.persistence.initialize()
@app.route('/')
def index():
return 'Hello, world!'
@app.route('/api/v1/bookmarks', methods=['POST'])
def bookmarks_post():
r = request.get_json(force=True)
bookmark = dict()
try:
bookmark['url'] = r['url']
except:
return jsonify(message='url field is mandatory'), 400
bookmark['title'] = r['title'] if 'title' in r else bookmark['url']
bookmark['timestamp'] = r['timestamp'] if 'timestamp' in r else datetime.datetime.utcnow().isoformat(' ')
result = heutagogy.persistence.save_bookmark(bookmark)
return jsonify(**result), 201
@app.route('/api/v1/bookmarks', methods=['GET'])
def bookmarks_get():
result = heutagogy.persistence.get_bookmarks()
return Response(json.dumps(result), mimetype='application/json')
| Fix reading json from client (ignore mimetype). | Fix reading json from client (ignore mimetype).
• http://stackoverflow.com/a/14112400/2517622
| Python | agpl-3.0 | heutagogy/heutagogy-backend,heutagogy/heutagogy-backend | ---
+++
@@ -14,7 +14,7 @@
@app.route('/api/v1/bookmarks', methods=['POST'])
def bookmarks_post():
- r = request.get_json()
+ r = request.get_json(force=True)
bookmark = dict()
try: |
2f2f18abbcde94e61c38a519b3b8d959be2e0301 | modules/roles.py | modules/roles.py | import discord
rolesTriggerString = '!role'
async def parse_roles_command(message, client):
msg = 'Role!'
await client.send_message(message.channel, msg)
| import discord
import shlex
rolesTriggerString = '!role'
async def parse_roles_command(message, client):
msg = shlex.split(message.content)
if len(msg) != 1
await client.send_message(message.channel, msg[1])
else:
break
| Modify to use shlex for getting command arguments | Modify to use shlex for getting command arguments
| Python | mit | suclearnub/scubot | ---
+++
@@ -1,7 +1,11 @@
import discord
+import shlex
rolesTriggerString = '!role'
async def parse_roles_command(message, client):
- msg = 'Role!'
- await client.send_message(message.channel, msg)
+ msg = shlex.split(message.content)
+ if len(msg) != 1
+ await client.send_message(message.channel, msg[1])
+ else:
+ break |
62d2a3ebfb2fb5cd9784be9b5020043e15726702 | impala/defaults.py | impala/defaults.py | DEBUG = False
SECRET_KEY = "changeme"
SQLALCHEMY_DATABASE_URI = "postgresql://impala:changeme@localhost/impala"
| DEBUG = False
SECRET_KEY = "changeme"
SQLALCHEMY_DATABASE_URI = "postgresql://impala:changeme@localhost/impala"
# smuggler:hunter2
M2M_USERS = {'smuggler': { 'access': ['librarian'], 'password_hash': '$pbkdf2-sha256$29000$g3Du3Zuz1hoDYKx1Tsm5tw$3hZ/b6WaNWaP4Q4zgIpL8nKjaTumekv5l97TkJx4ZBo'}}
| Add auth example to default config | Add auth example to default config
| Python | agpl-3.0 | wuvt/impala,wuvt/impala | ---
+++
@@ -1,3 +1,5 @@
DEBUG = False
SECRET_KEY = "changeme"
SQLALCHEMY_DATABASE_URI = "postgresql://impala:changeme@localhost/impala"
+# smuggler:hunter2
+M2M_USERS = {'smuggler': { 'access': ['librarian'], 'password_hash': '$pbkdf2-sha256$29000$g3Du3Zuz1hoDYKx1Tsm5tw$3hZ/b6WaNWaP4Q4zgIpL8nKjaTumekv5l97TkJx4ZBo'}} |
21cb063ec63792ddeb45a62570a8565c69f2091b | tests/functional/test_product.py | tests/functional/test_product.py | from .base import FunctionalTest
from store.tests.factories import *
class ProductTest(FunctionalTest):
def test_product_navigation(self):
# Create a product
product = ProductFactory.create()
# Get the product detail page
self.browser.get(self.live_server_url + product.get_absolute_url())
# Assert that the title is as expected
self.assertIn(product.name, self.browser.title)
def test_product_navigation_from_homepage(self):
# Create a sample product
product1 = ProductFactory.create()
# Get the homepage
self.browser.get(self.live_server_url)
# Navigate to the Product Page
self.browser.find_element_by_link_text(product1.name).click()
# Assert that the page is the one expected
self.assertIn(product1.name, self.browser.title)
| from .base import FunctionalTest
from store.tests.factories import *
class ProductTest(FunctionalTest):
def setUp(self):
super(ProductTest, self).setUp()
# Create a product
self.product = ProductFactory.create()
def test_product_navigation(self):
# Get the product detail page
self.browser.get(self.live_server_url + self.product.get_absolute_url())
# Assert that the title is as expected
self.assertIn(self.product.name, self.browser.title)
def test_product_navigation_from_homepage(self):
# Get the homepage
self.browser.get(self.live_server_url)
# Navigate to the Product Page
self.browser.find_element_by_link_text(self.product.name).click()
# Assert that the page is the one expected
self.assertIn(self.product.name, self.browser.title)
| Create product in setUp() method for DRY | Create product in setUp() method for DRY
Create the product to be reused in the test methods in the setUp method
that is run before each test method
| Python | bsd-3-clause | kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop | ---
+++
@@ -5,26 +5,25 @@
class ProductTest(FunctionalTest):
+ def setUp(self):
+ super(ProductTest, self).setUp()
+ # Create a product
+ self.product = ProductFactory.create()
+
def test_product_navigation(self):
- # Create a product
- product = ProductFactory.create()
-
# Get the product detail page
- self.browser.get(self.live_server_url + product.get_absolute_url())
+ self.browser.get(self.live_server_url + self.product.get_absolute_url())
# Assert that the title is as expected
- self.assertIn(product.name, self.browser.title)
+ self.assertIn(self.product.name, self.browser.title)
def test_product_navigation_from_homepage(self):
- # Create a sample product
- product1 = ProductFactory.create()
-
# Get the homepage
self.browser.get(self.live_server_url)
# Navigate to the Product Page
- self.browser.find_element_by_link_text(product1.name).click()
+ self.browser.find_element_by_link_text(self.product.name).click()
# Assert that the page is the one expected
- self.assertIn(product1.name, self.browser.title)
+ self.assertIn(self.product.name, self.browser.title) |
f3687b849dd01e9af32e6786ed6a813448cf8a38 | kaitaistructures.py | kaitaistructures.py | from struct import unpack
class KaitaiStruct:
def read_u1(self):
return unpack('B', self._io.read(1))[0]
| from struct import unpack
class KaitaiStruct:
def close(self):
self._io.close()
def read_u1(self):
return unpack('B', self._io.read(1))[0]
| Fix formatting as per PEP8 | Fix formatting as per PEP8
| Python | mit | kaitai-io/kaitai_struct_python_runtime | ---
+++
@@ -1,5 +1,8 @@
from struct import unpack
class KaitaiStruct:
- def read_u1(self):
- return unpack('B', self._io.read(1))[0]
+ def close(self):
+ self._io.close()
+
+ def read_u1(self):
+ return unpack('B', self._io.read(1))[0] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.