Dataset columns (name, type, value range):

  commit         string, length 40 to 40
  old_file       string, length 4 to 150
  new_file       string, length 4 to 150
  old_contents   string, length 0 to 3.26k
  new_contents   string, length 1 to 4.43k
  subject        string, length 15 to 501
  message        string, length 15 to 4.06k
  lang           string class, 4 distinct values
  license        string class, 13 distinct values
  repos          string, length 5 to 91.5k
  diff           string, length 0 to 4.35k
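The rows that follow pair each commit's metadata (hash, file paths, subject, message, language, license, repositories) with the file contents before and after the change and the unified diff between them. Assuming this export corresponds to a dataset published on the Hugging Face Hub (the column summary above matches the dataset-viewer layout), a minimal sketch of loading and inspecting a few rows with the datasets library could look like the following; the identifier "example-org/commit-diffs" is a placeholder, not the actual dataset name.

# Minimal sketch, assuming the rows are available as a Hugging Face dataset.
# "example-org/commit-diffs" is a placeholder identifier, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("example-org/commit-diffs", split="train")

# Each record pairs a commit message with the before/after file contents and the diff.
for row in ds.select(range(3)):
    print(row["commit"], row["old_file"], "->", row["new_file"])
    print(row["subject"])
    print(row["diff"][:200])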
98ff82c3ad36ef6dbcc40005b6f88d9c0d569744
setup.py
setup.py
#!/usr/bin/env python import os from setuptools import find_packages, setup def read(filename): with open(os.path.join(os.path.dirname(__file__), filename)) as f: return f.read() setup( name="devsoc-contentfiles", version="0.3a1", description="DEV Content Files", long_description=read("README.rst"), long_description_content_type="text/x-rst", url="https://github.com/developersociety/devsoc-contentfiles", maintainer="The Developer Society", maintainer_email="studio@dev.ngo", platforms=["any"], packages=find_packages(exclude=["tests"]), python_requires=">=3.5", classifiers=[ "Environment :: Web Environment", "Framework :: Django", "Framework :: Django :: 1.11", "Framework :: Django :: 2.2", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", ], license="BSD", )
#!/usr/bin/env python import os from setuptools import find_packages, setup def read(filename): with open(os.path.join(os.path.dirname(__file__), filename)) as f: return f.read() setup( name="devsoc-contentfiles", version="0.3a1", description="DEV Content Files", long_description=read("README.rst"), long_description_content_type="text/x-rst", url="https://github.com/developersociety/devsoc-contentfiles", maintainer="The Developer Society", maintainer_email="studio@dev.ngo", platforms=["any"], packages=find_packages(exclude=["tests"]), include_package_data=True, python_requires=">=3.5", classifiers=[ "Environment :: Web Environment", "Framework :: Django", "Framework :: Django :: 1.11", "Framework :: Django :: 2.2", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", ], license="BSD", )
Include package data specified in MANIFEST
Include package data specified in MANIFEST Not really needed, but adding it incase we do bundle other things in future
Python
bsd-3-clause
blancltd/blanc-contentfiles
--- +++ @@ -20,6 +20,7 @@ maintainer_email="studio@dev.ngo", platforms=["any"], packages=find_packages(exclude=["tests"]), + include_package_data=True, python_requires=">=3.5", classifiers=[ "Environment :: Web Environment",
23f5297ab47328a7f9c881530a00cc24461b5d98
setup.py
setup.py
from setuptools import setup from sys import version if version < '2.6.0': raise Exception("This module doesn't support any version less than 2.6") import sys sys.path.append("./test") with open('README.rst', 'r') as f: long_description = f.read() classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', "Programming Language :: Python", 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.0', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Topic :: Software Development :: Libraries :: Python Modules' ] setup( author='Keita Oouchi', author_email='keita.oouchi@gmail.com', url = 'https://github.com/keitaoouchi/seleniumwrapper', name = 'seleniumwrapper', version = '0.1.5', package_dir={"":"src"}, packages = ['seleniumwrapper'], test_suite = "test_seleniumwrapper.suite", license='BSD License', classifiers=classifiers, description = 'selenium webdriver wrapper to make manipulation easier.', long_description=long_description, )
from setuptools import setup from sys import version if version < '2.6.0': raise Exception("This module doesn't support any version less than 2.6") import sys sys.path.append("./test") with open('README.rst', 'r') as f: long_description = f.read() classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', "Programming Language :: Python", 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.0', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Topic :: Software Development :: Libraries :: Python Modules' ] setup( author='Keita Oouchi', author_email='keita.oouchi@gmail.com', url = 'https://github.com/keitaoouchi/seleniumwrapper', name = 'seleniumwrapper', version = '0.1.6', package_dir={"":"src"}, packages = ['seleniumwrapper'], test_suite = "test_seleniumwrapper.suite", license='BSD License', classifiers=classifiers, description = 'selenium webdriver wrapper to make manipulation easier.', long_description=long_description, )
Change version 0.1.5 to 0.1.6
Change version 0.1.5 to 0.1.6
Python
bsd-3-clause
keitaoouchi/seleniumwrapper
--- +++ @@ -32,7 +32,7 @@ author_email='keita.oouchi@gmail.com', url = 'https://github.com/keitaoouchi/seleniumwrapper', name = 'seleniumwrapper', - version = '0.1.5', + version = '0.1.6', package_dir={"":"src"}, packages = ['seleniumwrapper'], test_suite = "test_seleniumwrapper.suite",
62857c10b729177c1d6b3f8fd0fa4b475bc9b98f
testlog_etl/transforms/jscov_to_es.py
testlog_etl/transforms/jscov_to_es.py
# encoding: utf-8 # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # # Author: Trung Do (chin.bimbo@gmail.com) # from __future__ import division from __future__ import unicode_literals import json from pyLibrary.dot import wrap DEBUG = True def process(source_key, source, destination, resources, please_stop=None): with open(source) as json_file: json_data = wrap(json.load(json_file)) output_lines = [] for obj in json_data: for line in obj.covered: new_line = { "test": { "testUrl": obj.testUrl }, "source": { "sourceFile": obj.sourceFile, "lineCovered": line } } output_lines.append(new_line) return output_lines
# encoding: utf-8 # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # # Author: Trung Do (chin.bimbo@gmail.com) # from __future__ import division from __future__ import unicode_literals import json from pyLibrary.dot import wrap DEBUG = True def process(source_key, source, destination, resources, please_stop=None): with open(source) as json_file: json_data = wrap(json.load(json_file)) output_lines = [] for obj in json_data: # get the test name. Just use the test file name at the moment # TODO: change this when needed last_slash_index = obj.testUrl.rfind("/") test_name = obj.testUrl[last_slash_index + 1:] for line in obj.covered: new_line = { "test": { "name": test_name, "url": obj.testUrl }, "source": { "sourceFile": obj.sourceFile, "lineCovered": line } } output_lines.append(new_line) return output_lines
Add the test name and change testUrl to just url
Add the test name and change testUrl to just url
Python
mpl-2.0
klahnakoski/ActiveData-ETL,klahnakoski/TestLog-ETL,klahnakoski/ActiveData-ETL,klahnakoski/TestLog-ETL
--- +++ @@ -20,10 +20,16 @@ json_data = wrap(json.load(json_file)) output_lines = [] for obj in json_data: + # get the test name. Just use the test file name at the moment + # TODO: change this when needed + last_slash_index = obj.testUrl.rfind("/") + test_name = obj.testUrl[last_slash_index + 1:] + for line in obj.covered: new_line = { "test": { - "testUrl": obj.testUrl + "name": test_name, + "url": obj.testUrl }, "source": { "sourceFile": obj.sourceFile,
0286126ef0f7876c3ae58a081d48964e49b3a093
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages import conveyor install_requires = [ "APScheduler", "PyYAML", "redis", "slumber", "xmlrpc2", ] setup( name="conveyor", version=conveyor.__version__, description="Warehouse and PyPI Synchronization", long_description=open("README.rst").read(), url="https://github.com/crateio/conveyor/", license=open("LICENSE").read(), author="Donald Stufft", author_email="donald.stufft@gmail.com", install_requires=install_requires, packages=find_packages(exclude=["tests"]), zip_safe=False, )
#!/usr/bin/env python from setuptools import setup, find_packages import conveyor install_requires = [ "APScheduler", "forklift", "PyYAML", "redis", "xmlrpc2", ] setup( name="conveyor", version=conveyor.__version__, description="Warehouse and PyPI Synchronization", long_description=open("README.rst").read(), url="https://github.com/crateio/conveyor/", license=open("LICENSE").read(), author="Donald Stufft", author_email="donald.stufft@gmail.com", install_requires=install_requires, packages=find_packages(exclude=["tests"]), zip_safe=False, )
Switch slumber out for forklift
Switch slumber out for forklift
Python
bsd-2-clause
crateio/carrier
--- +++ @@ -6,9 +6,9 @@ install_requires = [ "APScheduler", + "forklift", "PyYAML", "redis", - "slumber", "xmlrpc2", ]
d7f4294f7a218bac15d6ef1b59465203b26f650b
setup.py
setup.py
#!/usr/bin/env python import os from setuptools import setup def readreq(filename): result = [] with open(filename) as f: for req in f: req = req.partition('#')[0].strip() if not req: continue result.append(req) return result def readfile(filename): with open(filename) as f: return f.read() setup( name='cli_tools', version='0.2.1', author='Kevin L. Mitchell', author_email='klmitch@mit.edu', description="Command Line Interface Tools", py_modules=['cli_tools'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License v3 or ' 'later (GPLv3+)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: User Interfaces', ], url='https://github.com/klmitch/cli_utils', long_description=readfile('README.rst'), install_requires=readreq('.requires'), tests_require=readreq('.test-requires'), )
#!/usr/bin/env python import os from setuptools import setup def readreq(filename): result = [] with open(filename) as f: for req in f: req = req.partition('#')[0].strip() if not req: continue result.append(req) return result def readfile(filename): with open(filename) as f: return f.read() setup( name='cli_tools', version='0.2.2', author='Kevin L. Mitchell', author_email='klmitch@mit.edu', description="Command Line Interface Tools", py_modules=['cli_tools'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License v3 or ' 'later (GPLv3+)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: User Interfaces', ], url='https://github.com/klmitch/cli_utils', long_description=readfile('README.rst'), install_requires=readreq('.requires'), tests_require=readreq('.test-requires'), )
Bump version number for release.
Bump version number for release.
Python
apache-2.0
klmitch/cli_tools
--- +++ @@ -23,7 +23,7 @@ setup( name='cli_tools', - version='0.2.1', + version='0.2.2', author='Kevin L. Mitchell', author_email='klmitch@mit.edu', description="Command Line Interface Tools",
7f4d233c48bcdcd327286f3c2ce4f3e2942e6c3c
data_test.py
data_test.py
import data from client import authentication_request_url, GoogleAPIClient c = GoogleAPIClient() if c.access_token is None: print 'Open the following URL in your Web browser and grant access' print authentication_request_url print print 'Enter the authorization code here:' code = raw_input('> ') c.get_token_pair(code) data.Channel.fetch_user_channel(c) s = data.Session() me = s.query(data.Channel).first() del s me.fetch_normal_playlists(c)
import data from client import authentication_request_url, GoogleAPIClient c = GoogleAPIClient() if c.access_token is None: print 'Open the following URL in your Web browser and grant access' print authentication_request_url print print 'Enter the authorization code here:' code = raw_input('> ') c.get_token_pair(code) data.Channel.fetch_user_channel(c) s = data.Session() me = s.query(data.Channel).first() del s me.fetch_normal_playlists(c) s = data.Session() for playlist in s.query(data.Playlist): playlist.fetch_playlist_videos(c)
Test getting playlist videos, too
Test getting playlist videos, too
Python
mit
drkitty/metatube,drkitty/metatube
--- +++ @@ -21,3 +21,7 @@ del s me.fetch_normal_playlists(c) + +s = data.Session() +for playlist in s.query(data.Playlist): + playlist.fetch_playlist_videos(c)
450a1f64a21afce008392e321fff2d268bb9fc41
setup.py
setup.py
from distutils.core import setup from distutils.extension import Extension from Cython.Distutils import build_ext import numpy ALGPATH = "clusterpy/core/toolboxes/cluster/componentsAlg/" ALGPKG = "clusterpy.core.toolboxes.cluster.componentsAlg." CLUSPATH = "clusterpy/core/toolboxes/cluster/" CLUSPKG = "clusterpy.core.toolboxes.cluster." setup( name='clusterPy', version='0.9.9', description='Library of spatially constrained clustering algorithms', long_description=""" clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""", author='RiSE Group', author_email='software@rise-group.org', url='http://www.rise-group.org/section/Software/clusterPy/', packages=['clusterpy','clusterpy.core','clusterpy.core.data', 'clusterpy.core.geometry','clusterpy.core.toolboxes', 'clusterpy.core.toolboxes.cluster', 'clusterpy.core.toolboxes.cluster.componentsAlg'], ext_modules = [Extension(CLUSPKG+"arisel", [CLUSPATH+"arisel.pyx"], extra_link_args=['-fopenmp'], extra_compile_args=['-fopenmp'] ), Extension(ALGPKG+"distanceFunctions", [ALGPATH+"distanceFunctions.pyx"]), Extension(ALGPKG+"dist2Regions", [ALGPATH+"dist2Regions.pyx"]), Extension(ALGPKG+"selectionTypeFunctions", [ALGPATH+"selectionTypeFunctions.pyx"]), Extension(ALGPKG+"init", [ALGPATH+"init.pyx"]), Extension(ALGPKG+"objFunctions", [ALGPATH+"objFunctions.pyx"]) ], cmdclass = {'build_ext': build_ext} )
from distutils.core import setup from distutils.extension import Extension setup( name='clusterPy', version='0.9.9', description='Library of spatially constrained clustering algorithms', long_description=""" clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""", author='RiSE Group', author_email='software@rise-group.org', url='http://www.rise-group.org/section/Software/clusterPy/', packages=['clusterpy','clusterpy.core','clusterpy.core.data', 'clusterpy.core.geometry','clusterpy.core.toolboxes', 'clusterpy.core.toolboxes.cluster', 'clusterpy.core.toolboxes.cluster.componentsAlg'], )
Remove cython Extension builder and build_ext from Setup
Remove cython Extension builder and build_ext from Setup
Python
bsd-3-clause
clusterpy/clusterpy,clusterpy/clusterpy
--- +++ @@ -1,15 +1,5 @@ from distutils.core import setup from distutils.extension import Extension -from Cython.Distutils import build_ext - -import numpy - -ALGPATH = "clusterpy/core/toolboxes/cluster/componentsAlg/" -ALGPKG = "clusterpy.core.toolboxes.cluster.componentsAlg." - -CLUSPATH = "clusterpy/core/toolboxes/cluster/" -CLUSPKG = "clusterpy.core.toolboxes.cluster." - setup( name='clusterPy', @@ -24,17 +14,4 @@ 'clusterpy.core.geometry','clusterpy.core.toolboxes', 'clusterpy.core.toolboxes.cluster', 'clusterpy.core.toolboxes.cluster.componentsAlg'], - ext_modules = [Extension(CLUSPKG+"arisel", [CLUSPATH+"arisel.pyx"], - extra_link_args=['-fopenmp'], - extra_compile_args=['-fopenmp'] - ), - Extension(ALGPKG+"distanceFunctions", [ALGPATH+"distanceFunctions.pyx"]), - Extension(ALGPKG+"dist2Regions", [ALGPATH+"dist2Regions.pyx"]), - Extension(ALGPKG+"selectionTypeFunctions", [ALGPATH+"selectionTypeFunctions.pyx"]), - Extension(ALGPKG+"init", [ALGPATH+"init.pyx"]), - Extension(ALGPKG+"objFunctions", [ALGPATH+"objFunctions.pyx"]) - ], - cmdclass = {'build_ext': build_ext} - - )
39b58dadab6b223e7832c9c48e66d026303b243d
setup.py
setup.py
""" pybitcoin ============== """ from setuptools import setup, find_packages setup( name='pybitcoin', version='0.9.8', url='https://github.com/blockstack/pybitcoin', license='MIT', author='Blockstack Developers', author_email='hello@onename.com', description="""Library for Bitcoin & other cryptocurrencies. Tools are provided for blockchain transactions, RPC calls, and private keys, public keys, and addresses.""", keywords='bitcoin btc litecoin namecoin dogecoin cryptocurrency', packages=find_packages(), zip_safe=False, install_requires=[ 'requests>=2.4.3', 'ecdsa>=0.13', 'commontools==0.1.0', 'utilitybelt>=0.2.1', 'pybitcointools==1.1.15', 'python-bitcoinrpc==0.1', 'keychain>=0.1.4', 'bitcoin>=1.1.39' ], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet', 'Topic :: Security :: Cryptography', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
""" pybitcoin ============== """ from setuptools import setup, find_packages setup( name='pybitcoin', version='0.9.8', url='https://github.com/blockstack/pybitcoin', license='MIT', author='Blockstack Developers', author_email='hello@onename.com', description="""Library for Bitcoin & other cryptocurrencies. Tools are provided for blockchain transactions, RPC calls, and private keys, public keys, and addresses.""", keywords='bitcoin btc litecoin namecoin dogecoin cryptocurrency', packages=find_packages(), zip_safe=False, install_requires=[ 'requests>=2.4.3', 'ecdsa>=0.13', 'commontools==0.1.0', 'utilitybelt>=0.2.1', 'python-bitcoinrpc==0.1', 'keychain>=0.1.4', 'bitcoin>=1.1.39' ], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet', 'Topic :: Security :: Cryptography', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
Remove pybitcointools in favor of bitcoin
Remove pybitcointools in favor of bitcoin
Python
mit
blockstack/pybitcoin
--- +++ @@ -22,7 +22,6 @@ 'ecdsa>=0.13', 'commontools==0.1.0', 'utilitybelt>=0.2.1', - 'pybitcointools==1.1.15', 'python-bitcoinrpc==0.1', 'keychain>=0.1.4', 'bitcoin>=1.1.39'
494d2a2cfcfa5dc9058f087588fb1371021174d4
src/python/twitter/mesos/location.py
src/python/twitter/mesos/location.py
import re import socket class Location(object): """Determine which cluster the code is running in, or CORP if we are not in prod.""" PROD_SUFFIXES = [ '.corpdc.twitter.com', '.prod.twitter.com', ] @staticmethod def is_corp(): """ Returns true if this job is in corp and requires an ssh tunnel for scheduler RPCs. """ hostname = socket.gethostname() for suffix in Location.PROD_SUFFIXES: if hostname.endswith(suffix): return False return True @staticmethod def is_prod(): return not Location.is_corp()
import re import socket class Location(object): """Determine which cluster the code is running in, or CORP if we are not in prod.""" PROD_SUFFIXES = [ '.corpdc.twitter.com', '.prod.twitter.com', '.devel.twitter.com' ] @staticmethod def is_corp(): """ Returns true if this job is in corp and requires an ssh tunnel for scheduler RPCs. """ hostname = socket.gethostname() for suffix in Location.PROD_SUFFIXES: if hostname.endswith(suffix): return False return True @staticmethod def is_prod(): return not Location.is_corp()
Add devel.twitter.com to PROD suffixes
Add devel.twitter.com to PROD suffixes
Python
apache-2.0
rosmo/aurora,rosmo/aurora,kidaa/aurora,crashlytics/aurora,wfarner/aurora,kidaa/aurora,mkhutornenko/incubator-aurora,apache/aurora,kidaa/aurora,medallia/aurora,mschenck/aurora,wfarner/aurora,mschenck/aurora,apache/aurora,protochron/aurora,shahankhatch/aurora,crashlytics/aurora,shahankhatch/aurora,mkhutornenko/incubator-aurora,crashlytics/aurora,wfarner/aurora,rdelval/aurora,rosmo/aurora,rosmo/aurora,protochron/aurora,thinker0/aurora,wfarner/aurora,protochron/aurora,thinker0/aurora,mschenck/aurora,protochron/aurora,rdelval/aurora,protochron/aurora,mkhutornenko/incubator-aurora,rdelval/aurora,apache/aurora,wfarner/aurora,medallia/aurora,rosmo/aurora,wfarner/aurora,shahankhatch/aurora,apache/aurora,medallia/aurora,shahankhatch/aurora,thinker0/aurora,medallia/aurora,mkhutornenko/incubator-aurora,medallia/aurora,thinker0/aurora,rdelval/aurora,medallia/aurora,apache/aurora,protochron/aurora,rdelval/aurora,mschenck/aurora,shahankhatch/aurora,apache/aurora,mschenck/aurora,thinker0/aurora,kidaa/aurora,crashlytics/aurora,crashlytics/aurora,rosmo/aurora,rdelval/aurora,thinker0/aurora,mschenck/aurora,kidaa/aurora,shahankhatch/aurora,kidaa/aurora,crashlytics/aurora,mkhutornenko/incubator-aurora
--- +++ @@ -7,6 +7,7 @@ PROD_SUFFIXES = [ '.corpdc.twitter.com', '.prod.twitter.com', + '.devel.twitter.com' ] @staticmethod
f82a9994bfe782a575136af506b92c72fd6ac60e
src/python/twitter/mesos/location.py
src/python/twitter/mesos/location.py
import re import socket class Location(object): " Determine which cluster the code is running in, or CORP if we are not in prod. " CORP = "corp" @staticmethod def get_location(): hostname = socket.gethostname() prod_matcher = re.match('^(\w{3}).*.twitter\.com$', hostname) if re.search('.+\.local$', hostname): return Location.CORP elif prod_matcher is not None: return prod_matcher.group(1) else: print 'Can\'t determine location (prod vs. corp). Hostname=%s' % hostname return None
import re import socket class Location(object): """Determine which cluster the code is running in, or CORP if we are not in prod.""" CORP = "corp" @staticmethod def get_location(): hostname = socket.gethostname() prod_matcher = re.match('^(\w{3}\d+).*\.twitter\.com$', hostname) prod_host = prod_matcher.group(1) if prod_matcher else None if hostname.endswith('.local') or hostname.endswith('.office.twttr.net') or 'sfo0' == prod_host: return Location.CORP elif prod_host: return prod_host else: print 'Can\'t determine location (prod vs. corp). Hostname=%s' % hostname return None
Update Location determination to current hostname standards.
Update Location determination to current hostname standards.
Python
apache-2.0
apache/aurora,shahankhatch/aurora,kidaa/aurora,protochron/aurora,thinker0/aurora,medallia/aurora,apache/aurora,medallia/aurora,mkhutornenko/incubator-aurora,rosmo/aurora,crashlytics/aurora,thinker0/aurora,protochron/aurora,thinker0/aurora,mschenck/aurora,medallia/aurora,wfarner/aurora,wfarner/aurora,kidaa/aurora,kidaa/aurora,mschenck/aurora,wfarner/aurora,crashlytics/aurora,rdelval/aurora,rdelval/aurora,mkhutornenko/incubator-aurora,rosmo/aurora,protochron/aurora,rdelval/aurora,shahankhatch/aurora,rdelval/aurora,thinker0/aurora,rdelval/aurora,medallia/aurora,apache/aurora,apache/aurora,protochron/aurora,apache/aurora,mkhutornenko/incubator-aurora,kidaa/aurora,mschenck/aurora,shahankhatch/aurora,rosmo/aurora,kidaa/aurora,mschenck/aurora,shahankhatch/aurora,medallia/aurora,rosmo/aurora,apache/aurora,mschenck/aurora,protochron/aurora,mkhutornenko/incubator-aurora,shahankhatch/aurora,medallia/aurora,crashlytics/aurora,kidaa/aurora,crashlytics/aurora,wfarner/aurora,thinker0/aurora,crashlytics/aurora,protochron/aurora,rdelval/aurora,shahankhatch/aurora,thinker0/aurora,rosmo/aurora,wfarner/aurora,rosmo/aurora,crashlytics/aurora,mschenck/aurora,wfarner/aurora,mkhutornenko/incubator-aurora
--- +++ @@ -2,19 +2,19 @@ import socket class Location(object): - " Determine which cluster the code is running in, or CORP if we are not in prod. " + """Determine which cluster the code is running in, or CORP if we are not in prod.""" CORP = "corp" @staticmethod def get_location(): hostname = socket.gethostname() - prod_matcher = re.match('^(\w{3}).*.twitter\.com$', hostname) - - if re.search('.+\.local$', hostname): + prod_matcher = re.match('^(\w{3}\d+).*\.twitter\.com$', hostname) + prod_host = prod_matcher.group(1) if prod_matcher else None + if hostname.endswith('.local') or hostname.endswith('.office.twttr.net') or 'sfo0' == prod_host: return Location.CORP - elif prod_matcher is not None: - return prod_matcher.group(1) + elif prod_host: + return prod_host else: print 'Can\'t determine location (prod vs. corp). Hostname=%s' % hostname return None
238c6b73115f6493246f25d45268e7a675980397
build/build.py
build/build.py
# # Copyright (c) 2004 Specifix, Inc. # All rights reserved # import os import shutil import util class Make: def doBuild(self, dir): os.system("cd %s; make" % dir) class MakeInstall: def doInstall(self, dir, root): os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root)) def __init__(self, rootVar = "DESTDIR"): self.rootVar = rootVar class InstallFile: def doInstall(self, dir, root): dest = root + self.toFile util.mkdirChain(os.path.dirname(dest)) shutil.copyfile(self.toFile, dest) os.chmod(dest, self.mode) def __init__(self, fromFile, toFile, perms = 0644): self.toFile = toFile self.file = fromFile self.mode = perms
# # Copyright (c) 2004 Specifix, Inc. # All rights reserved # import os import shutil import util class ManualConfigure: def doBuild(self, dir): os.system("cd %s; ./configure %s" % (dir, self.extraflags)) def __init__(self, extraflags=""): self.extraflags = extraflags class Configure: def doBuild(self, dir): os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags)) def __init__(self, extraflags=""): self.extraflags = extraflags class Make: def doBuild(self, dir): os.system("cd %s; make" % dir) class MakeInstall: def doInstall(self, dir, root): os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root)) def __init__(self, rootVar = "DESTDIR"): self.rootVar = rootVar class InstallFile: def doInstall(self, dir, root): dest = root + self.toFile util.mkdirChain(os.path.dirname(dest)) shutil.copyfile(self.toFile, dest) os.chmod(dest, self.mode) def __init__(self, fromFile, toFile, perms = 0644): self.toFile = toFile self.file = fromFile self.mode = perms
Add classes to run ./configure
Add classes to run ./configure
Python
apache-2.0
sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary
--- +++ @@ -5,6 +5,22 @@ import os import shutil import util + +class ManualConfigure: + + def doBuild(self, dir): + os.system("cd %s; ./configure %s" % (dir, self.extraflags)) + + def __init__(self, extraflags=""): + self.extraflags = extraflags + +class Configure: + + def doBuild(self, dir): + os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags)) + + def __init__(self, extraflags=""): + self.extraflags = extraflags class Make:
7bc1f87b24abdfbcbf68f5fc6b3266fc3e6799f0
setup.py
setup.py
from setuptools import setup, find_packages setup( name='prometheus-kafka-consumer-group-exporter', version='0.5.0.dev1', description='Kafka consumer group Prometheus exporter', url='https://github.com/Braedon/prometheus-kafka-consumer-group-exporter', author='Braedon Vickers', author_email='braedon.vickers@gmail.com', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Topic :: System :: Monitoring', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='monitoring prometheus exporter kafka consumer group', packages=find_packages(), install_requires=[ 'kafka-python >= 1.3', 'jog', 'prometheus-client', 'javaproperties' ], entry_points={ 'console_scripts': [ 'prometheus-kafka-consumer-group-exporter=prometheus_kafka_consumer_group_exporter:main', ], }, )
from setuptools import setup, find_packages setup( name='prometheus-kafka-consumer-group-exporter', version='0.5.0.dev1', description='Kafka consumer group Prometheus exporter', url='https://github.com/Braedon/prometheus-kafka-consumer-group-exporter', author='Braedon Vickers', author_email='braedon.vickers@gmail.com', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Topic :: System :: Monitoring', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='monitoring prometheus exporter kafka consumer group', packages=find_packages(), install_requires=[ 'kafka-python >= 1.3', 'jog', 'prometheus-client >= 0.6.0', 'javaproperties' ], entry_points={ 'console_scripts': [ 'prometheus-kafka-consumer-group-exporter=prometheus_kafka_consumer_group_exporter:main', ], }, )
Upgrade Prometheus client to fix memory leak with Python 3.7
Upgrade Prometheus client to fix memory leak with Python 3.7 https://github.com/prometheus/client_python/issues/340
Python
mit
braedon/prometheus-kafka-consumer-group-exporter
--- +++ @@ -22,7 +22,7 @@ install_requires=[ 'kafka-python >= 1.3', 'jog', - 'prometheus-client', + 'prometheus-client >= 0.6.0', 'javaproperties' ], entry_points={
f730137926b5a59671347e35ced1b43804393ec3
setup.py
setup.py
import os from setuptools import setup def read(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as f: return f.read() setup(name='timy', version='0.4.1', description='Minimalist measurement of python code time', long_description=read('README.md'), url='https://github.com/ramonsaraiva/timy', author='Ramon Saraiva', author_email='ramonsaraiva@gmail.com', license='MIT', packages=['timy'], zip_safe=False)
import os from setuptools import setup def read(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as f: return f.read() setup(name='timy', version='0.4.1', description='Minimalist measurement of python code time', long_description=read('README.md'), long_description_content_type='text/markdown', url='https://github.com/ramonsaraiva/timy', author='Ramon Saraiva', author_email='ramonsaraiva@gmail.com', license='MIT', packages=['timy'], zip_safe=False)
Add long description content type
Add long description content type
Python
mit
ramonsaraiva/timy
--- +++ @@ -11,6 +11,7 @@ version='0.4.1', description='Minimalist measurement of python code time', long_description=read('README.md'), + long_description_content_type='text/markdown', url='https://github.com/ramonsaraiva/timy', author='Ramon Saraiva', author_email='ramonsaraiva@gmail.com',
24764af1c7dc0a016b8e36c9a7659e9ac516e026
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup import setuptools with open("README.md", 'r') as f: long_description = f.read() setup( name='chess_py', version='2.8.0', description='Python chess client', long_description=long_description, platforms='MacOS X, Windows, Linux', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 2.7', ], author='Aubhro Sengupta', author_email='aubhrosengupta@gmail.com', url='https://github.com/LordDarkula/chess_py', license='MIT', packages=setuptools.find_packages() )
#!/usr/bin/env python from setuptools import setup import setuptools with open("README.md", 'r') as f: long_description = f.read() setup( name='chess_py', version='2.8.1', description='Python chess client', long_description=long_description, long_description_content_type='text/markdown', platforms='MacOS X, Windows, Linux', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 2.7', ], author='Aubhro Sengupta', author_email='aubhrosengupta@gmail.com', url='https://github.com/LordDarkula/chess_py', license='MIT', packages=setuptools.find_packages() )
Change README content type to markdown for pypi
Change README content type to markdown for pypi
Python
mit
LordDarkula/chess_py
--- +++ @@ -8,9 +8,10 @@ setup( name='chess_py', - version='2.8.0', + version='2.8.1', description='Python chess client', long_description=long_description, + long_description_content_type='text/markdown', platforms='MacOS X, Windows, Linux', classifiers=[ 'Development Status :: 5 - Production/Stable',
bf0d12e6470d6fb8b453cf8b08910b4b88847134
setup.py
setup.py
from setuptools import setup, find_packages try: # HACK: Avoid "TypeError: 'NoneType' object is not callable" # Related to issue http://bugs.python.org/issue15881 # https://hg.python.org/cpython/rev/0a58fa8e9bac import multiprocessing except ImportError: pass setup( name='di-py', description='Dependency injection library', version='{VERSION}', url='https://www.github.com/juandebravo/di-py', author='Telefonica Digital', author_email='connect-dev@tid.es', packages=find_packages(), include_package_data=True, install_requires=[], tests_require=['nose', 'pyshould'], test_suite='nose.collector', zip_safe=False, )
from setuptools import setup, find_packages try: # HACK: Avoid "TypeError: 'NoneType' object is not callable" # Related to issue http://bugs.python.org/issue15881 # https://hg.python.org/cpython/rev/0a58fa8e9bac import multiprocessing except ImportError: pass setup( name='di-py', description='Dependency injection library', version='{VERSION}', url='https://www.github.com/telefonicaid/di-py', author='Telefonica Digital', author_email='connect-dev@tid.es', packages=find_packages(), include_package_data=True, install_requires=[], tests_require=['nose', 'pyshould'], test_suite='nose.collector', zip_safe=False, )
Revert "move temporally URL to juandebravo"
Revert "move temporally URL to juandebravo" This reverts commit 9325f6884c5110cf57ab8becb26eb876c7deb59c.
Python
apache-2.0
telefonicaid/di-py
--- +++ @@ -12,7 +12,7 @@ name='di-py', description='Dependency injection library', version='{VERSION}', - url='https://www.github.com/juandebravo/di-py', + url='https://www.github.com/telefonicaid/di-py', author='Telefonica Digital', author_email='connect-dev@tid.es', packages=find_packages(),
719c6a851778107731ab3545242d52093eeb3b97
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages setup(name='relengapi-mapper', version='0.2.3', description='hg to git mapper', author='Chris AtLee', author_email='chris@atlee.ca', url='https://github.com/petemoore/mapper', packages=find_packages(), namespace_packages=['relengapi', 'relengapi.blueprints'], entry_points={ "relengapi.blueprints": [ 'mapper = relengapi.blueprints.mapper:bp', ], }, install_requires=[ 'Flask', 'relengapi>=0.3', 'IPy', 'python-dateutil', ], license='MPL2', extras_require={ 'test': [ 'nose', 'mock' ] })
#!/usr/bin/env python from setuptools import setup, find_packages setup(name='relengapi-mapper', version='0.2.1', description='hg to git mapper', author='Chris AtLee', author_email='chris@atlee.ca', url='https://github.com/petemoore/mapper', packages=find_packages(), namespace_packages=['relengapi', 'relengapi.blueprints'], entry_points={ "relengapi.blueprints": [ 'mapper = relengapi.blueprints.mapper:bp', ], }, install_requires=[ 'Flask', 'relengapi>=0.3', 'IPy', 'python-dateutil', ], license='MPL2', extras_require={ 'test': [ 'nose', 'mock' ] })
Reset version number to 0.2.1 in preparation for making release 0.2.2 (release script bumps version number)
Reset version number to 0.2.1 in preparation for making release 0.2.2 (release script bumps version number)
Python
mpl-2.0
lundjordan/build-relengapi,garbas/mozilla-releng-services,lundjordan/services,hwine/build-relengapi,La0/mozilla-relengapi,djmitche/build-relengapi,mozilla/build-relengapi,mozilla/build-relengapi,mozilla-releng/services,lundjordan/services,La0/mozilla-relengapi,Callek/build-relengapi,Callek/build-relengapi,lundjordan/services,andrei987/services,mozilla/build-relengapi,srfraser/services,mozilla-releng/services,La0/mozilla-relengapi,lundjordan/build-relengapi,mozilla-releng/services,srfraser/services,srfraser/services,mozilla-releng/services,djmitche/build-relengapi,garbas/mozilla-releng-services,andrei987/services,lundjordan/build-relengapi,andrei987/services,djmitche/build-relengapi,Callek/build-relengapi,La0/mozilla-relengapi,srfraser/services,djmitche/build-relengapi,hwine/build-relengapi,andrei987/services,garbas/mozilla-releng-services,lundjordan/services,mozilla/build-relengapi,lundjordan/build-relengapi,hwine/build-relengapi,Callek/build-relengapi,hwine/build-relengapi,garbas/mozilla-releng-services
--- +++ @@ -3,7 +3,7 @@ from setuptools import setup, find_packages setup(name='relengapi-mapper', - version='0.2.3', + version='0.2.1', description='hg to git mapper', author='Chris AtLee', author_email='chris@atlee.ca',
b363536d92a955150eef1b412c1e34cba66caeeb
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup setup(name='psycopg-nestedtransactions', version='1.0', description='Database transaction manager for psycopg2 database connections with seamless support for nested transactions.', url='https://github.com/asqui/psycopg-nestedtransactions', packages=['nestedtransactions'], install_requires=['psycopg2', 'pg8000<=1.12.4;python_version<"3"'], extras_require=dict( test=['pytest', 'testing.postgresql'] ) )
#!/usr/bin/env python from distutils.core import setup setup(name='psycopg-nestedtransactions', version='1.0', description='Database transaction manager for psycopg2 database connections with seamless support for nested transactions.', url='https://github.com/asqui/psycopg-nestedtransactions', packages=['nestedtransactions'], install_requires=['psycopg2'], extras_require=dict( test=['pytest', 'testing.postgresql'] ) )
Revert "Fix Python 2.7 build" and un-pin pg8000 version on Python 2
Revert "Fix Python 2.7 build" and un-pin pg8000 version on Python 2 The underlying issue has been resolved in pg8000: https://github.com/tlocke/pg8000/issues/7 This reverts commit f27e5a3f.
Python
mit
asqui/psycopg-nestedtransactions
--- +++ @@ -7,7 +7,7 @@ description='Database transaction manager for psycopg2 database connections with seamless support for nested transactions.', url='https://github.com/asqui/psycopg-nestedtransactions', packages=['nestedtransactions'], - install_requires=['psycopg2', 'pg8000<=1.12.4;python_version<"3"'], + install_requires=['psycopg2'], extras_require=dict( test=['pytest', 'testing.postgresql'] )
fc4fa5d06ea0ca557d69112d1c8d0f10c8e594e0
diet_gtfs.py
diet_gtfs.py
import csv import sys # agency.txt done # feed_info.txt nothing to change # calendar_dates.txt depends on service_id. # routes.txt depends on agency.txt # shapes.txt depends on trips.txt # stops.txt depends on stop_times.txt # stop_times.txt depends on trip_id. # transfers.txt depends on stop_id from and to, routes. # trips.txt contains shape_id, also route_id to trip_id. def clean_agency_file(*agencies): with open('agency.txt', 'r') as f: reader = csv.reader(f) next(f) for row in reader: if row[0] in agencies: print(row) def main(): agencies = sys.argv[1:] clean_agency_file(*agencies) if __name__ == '__main__': main()
import csv import sys # agency.txt done # feed_info.txt nothing to change # calendar_dates.txt depends on service_id. # routes.txt depends on agency.txt # shapes.txt depends on trips.txt # stops.txt depends on stop_times.txt # stop_times.txt depends on trip_id. # transfers.txt depends on stop_id from and to, routes. # trips.txt contains shape_id, also route_id to trip_id. def clean_agency_file(*agencies): with open('agency.txt', 'r') as f: reader = csv.reader(f) filtered_rows = [] filtered_rows.append(next(reader)) for row in reader: if row[0] in agencies: filtered_rows.append(row) with open('cleaned/agency.txt', 'w') as f: writer = csv.writer(f) writer.writerows(filtered_rows) def main(): agencies = sys.argv[1:] clean_agency_file(*agencies) if __name__ == '__main__': main()
Create a complete filtered output agency.txt
Create a complete filtered output agency.txt Filter based on arguments passed from shell.
Python
bsd-2-clause
sensiblecodeio/diet-gtfs
--- +++ @@ -12,13 +12,20 @@ # transfers.txt depends on stop_id from and to, routes. # trips.txt contains shape_id, also route_id to trip_id. + def clean_agency_file(*agencies): with open('agency.txt', 'r') as f: reader = csv.reader(f) - next(f) + filtered_rows = [] + filtered_rows.append(next(reader)) + for row in reader: if row[0] in agencies: - print(row) + filtered_rows.append(row) + + with open('cleaned/agency.txt', 'w') as f: + writer = csv.writer(f) + writer.writerows(filtered_rows) def main():
37f28dba866ffa3457a4f14a7d3e74e8e88a1dd0
testing/get_value_test.py
testing/get_value_test.py
#!/usr/bin/env python from __future__ import print_function import sys import numpy as np from bmi import MyBMI def print_var_values (bmi, var_name): s = ', '.join ([str (x) for x in bmi.get_value (var_name)]) print ('%s' % s) def run (): bmi = MyBMI () bmi.initialize (None) print ('%s' % bmi.get_component_name ()) for i in range (10): print ('Time %d: ' % i, end='') print_var_values (bmi, 'height_above_sea_floor') bmi.update () print ('Time %d: ' % i, end='') print_var_values (bmi, 'height_above_sea_floor') bmi.finalize () if __name__ == '__main__': run ()
#!/usr/bin/env python from __future__ import print_function import sys import numpy as np from poisson import BmiPoisson def main(): model = BmiPoisson() model.initialize() print('%s' % model.get_component_name ()) for i in xrange(10): print('Time %d' % i) np.savetxt(sys.stdout, model.get_value('land_surface__elevation'), fmt='%.3f') model.update() print('Time %d' % i) np.savetxt(sys.stdout, model.get_value('land_surface__elevation'), fmt='%.3f') model.finalize() if __name__ == '__main__': main()
Update to use new bmi model.
Update to use new bmi model.
Python
mit
mperignon/bmi-STM,mperignon/bmi-delta,mperignon/bmi-STM,mperignon/bmi-delta
--- +++ @@ -5,29 +5,28 @@ import sys import numpy as np -from bmi import MyBMI +from poisson import BmiPoisson -def print_var_values (bmi, var_name): - s = ', '.join ([str (x) for x in bmi.get_value (var_name)]) - print ('%s' % s) -def run (): - bmi = MyBMI () +def main(): + model = BmiPoisson() - bmi.initialize (None) + model.initialize() - print ('%s' % bmi.get_component_name ()) + print('%s' % model.get_component_name ()) - for i in range (10): - print ('Time %d: ' % i, end='') - print_var_values (bmi, 'height_above_sea_floor') - bmi.update () + for i in xrange(10): + print('Time %d' % i) + np.savetxt(sys.stdout, model.get_value('land_surface__elevation'), + fmt='%.3f') + model.update() - print ('Time %d: ' % i, end='') - print_var_values (bmi, 'height_above_sea_floor') + print('Time %d' % i) + np.savetxt(sys.stdout, model.get_value('land_surface__elevation'), + fmt='%.3f') - bmi.finalize () + model.finalize() + if __name__ == '__main__': - run () - + main()
3fdc0db2608427ccb63b53b827e2a78aee40366a
tests/test_ui_elements.py
tests/test_ui_elements.py
import pytest @pytest.fixture def pyglui_ui_instance(): import glfw from pyglui import cygl, ui glfw.ERROR_REPORTING = "raise" glfw_init_successful = glfw.init() assert glfw_init_successful, "Failed to initialize GLFW" glfw.window_hint(glfw.VISIBLE, glfw.FALSE) try: window = glfw.create_window(200, 200, "Test window", None, None) except glfw.GLFWError as err: # Skip if CI does not allow creating a window, e.g. on macOS: # > NSGL: Failed to find a suitable pixel format glfw.terminate() pytest.skip(str(err)) glfw.make_context_current(window) # required for GLEW init cygl.utils.init() global_ui_instance = ui.UI() yield global_ui_instance global_ui_instance.terminate() glfw.destroy_window(window) glfw.terminate() @pytest.fixture def attribute_context(): return {"test": 5} def test_Color_Legend(pyglui_ui_instance, attribute_context): import pyglui.ui black = (0.0, 0.0, 0.0, 1.0) pyglui.ui.Color_Legend(black, "test color")
import pytest @pytest.fixture def pyglui_ui_instance(): import glfw from pyglui import cygl, ui glfw.ERROR_REPORTING = "raise" try: glfw_init_successful = glfw.init() assert glfw_init_successful, "Failed to initialize GLFW" glfw.window_hint(glfw.VISIBLE, glfw.FALSE) window = glfw.create_window(200, 200, "Test window", None, None) except glfw.GLFWError as err: # Skip if CI does not allow creating a window, e.g. on macOS: # > NSGL: Failed to find a suitable pixel format glfw.terminate() pytest.skip(str(err)) glfw.make_context_current(window) # required for GLEW init cygl.utils.init() global_ui_instance = ui.UI() yield global_ui_instance global_ui_instance.terminate() glfw.destroy_window(window) glfw.terminate() @pytest.fixture def attribute_context(): return {"test": 5} def test_Color_Legend(pyglui_ui_instance, attribute_context): import pyglui.ui black = (0.0, 0.0, 0.0, 1.0) pyglui.ui.Color_Legend(black, "test color")
Allow glfw.init() to fail on CI as well
Allow glfw.init() to fail on CI as well
Python
mit
pupil-labs/pyglui,pupil-labs/pyglui
--- +++ @@ -8,10 +8,10 @@ from pyglui import cygl, ui glfw.ERROR_REPORTING = "raise" - glfw_init_successful = glfw.init() - assert glfw_init_successful, "Failed to initialize GLFW" - glfw.window_hint(glfw.VISIBLE, glfw.FALSE) try: + glfw_init_successful = glfw.init() + assert glfw_init_successful, "Failed to initialize GLFW" + glfw.window_hint(glfw.VISIBLE, glfw.FALSE) window = glfw.create_window(200, 200, "Test window", None, None) except glfw.GLFWError as err: # Skip if CI does not allow creating a window, e.g. on macOS:
fcec71d285236dc3906611323ab74ecd89337081
metakernel/magics/tests/test_edit_magic.py
metakernel/magics/tests/test_edit_magic.py
from metakernel.tests.utils import (get_kernel, get_log_text, clear_log_text, EvalKernel) def test_edit_magic(): kernel = get_kernel(EvalKernel) results = kernel.do_execute("%edit LICENSE.txt") text = results["payload"][0]["text"] assert '%%file LICENSE.txt\n\n# Copyright (c) Ipython Kernel Development Team.\n# Distributed under the terms of the Modified BSD License.\n\n\n# Copyright (c) Metakernel Development Team.\n# Distributed under the terms of the Modified BSD License.\n\n' == text, text
from metakernel.tests.utils import (get_kernel, get_log_text, clear_log_text, EvalKernel) def test_edit_magic(): kernel = get_kernel(EvalKernel) results = kernel.do_execute("%%edit %s" % __file__) text = results["payload"][0]["text"] assert text.startswith('%%file') assert 'def test_edit_magic' in text
Fix test edit magic test
Fix test edit magic test
Python
bsd-3-clause
Calysto/metakernel
--- +++ @@ -4,7 +4,8 @@ def test_edit_magic(): kernel = get_kernel(EvalKernel) - results = kernel.do_execute("%edit LICENSE.txt") + + results = kernel.do_execute("%%edit %s" % __file__) text = results["payload"][0]["text"] - assert '%%file LICENSE.txt\n\n# Copyright (c) Ipython Kernel Development Team.\n# Distributed under the terms of the Modified BSD License.\n\n\n# Copyright (c) Metakernel Development Team.\n# Distributed under the terms of the Modified BSD License.\n\n' == text, text - + assert text.startswith('%%file') + assert 'def test_edit_magic' in text
7a21d2bccbcff2eb6a8b7cfd00c38a28553c0bcd
gratipay/models/country.py
gratipay/models/country.py
from __future__ import absolute_import, division, print_function, unicode_literals from postgres.orm import Model class Country(Model): """Represent country records from our database (read-only). :var int id: the record's primary key in our ``countries`` table :var unicode code: the country's `ISO 3166-1 alpha-2`_ code .. _ISO 3166-1 alpha-2 : https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2 """ typname = 'countries'
from __future__ import absolute_import, division, print_function, unicode_literals from postgres.orm import Model class Country(Model): """Represent country records from our database (read-only). :var int id: the record's primary key in our ``countries`` table :var unicode code: the country's `ISO 3166-1 alpha-2`_ code .. _ISO 3166-1 alpha-2 : https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2 """ typname = 'countries' @classmethod def from_code2(cls, code2): return cls.db.one("SELECT countries.*::countries FROM countries WHERE code2=%s", (code2,))
Add a helper to Country; should go upstream prolly
Add a helper to Country; should go upstream prolly
Python
mit
gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com
--- +++ @@ -13,3 +13,7 @@ """ typname = 'countries' + + @classmethod + def from_code2(cls, code2): + return cls.db.one("SELECT countries.*::countries FROM countries WHERE code2=%s", (code2,))
90eafe038adfeddf5379e950b03ec0727d0c5d36
ci/__init__.py
ci/__init__.py
from cisd import CISD
from pyscf.ci import cisd def CISD(mf, frozen=[], mo_coeff=None, mo_occ=None): from pyscf import scf if isinstance(mf, (scf.uhf.UHF, scf.rohf.ROHF)): raise NotImplementedError('RO-CISD, UCISD are not available in this pyscf version') return cisd.CISD(mf, frozen, mo_coeff, mo_occ)
Improve error message for CISD
Improve error message for CISD
Python
apache-2.0
gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf
--- +++ @@ -1 +1,7 @@ -from cisd import CISD +from pyscf.ci import cisd + +def CISD(mf, frozen=[], mo_coeff=None, mo_occ=None): + from pyscf import scf + if isinstance(mf, (scf.uhf.UHF, scf.rohf.ROHF)): + raise NotImplementedError('RO-CISD, UCISD are not available in this pyscf version') + return cisd.CISD(mf, frozen, mo_coeff, mo_occ)
a19b043c910274277ce5a6a777f686030f6ef7d0
media_manager/migrations/0001_initial.py
media_manager/migrations/0001_initial.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('lti_context_id', models.CharField(max_length=128, null=True)), ('lti_tool_consumer_instance_guid', models.CharField(max_length=1024, null=True)), ('api_course_id', models.IntegerField(null=True)), ('created', models.DateTimeField(auto_now_add=True)), ('updated', models.DateTimeField(auto_now=True)), ], options={ 'verbose_name': 'course', 'verbose_name_plural': 'courses', }, ), migrations.AlterUniqueTogether( name='course', unique_together=set([('lti_context_id', 'lti_tool_consumer_instance_guid')]), ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Course', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('lti_context_id', models.CharField(max_length=128, null=True)), ('lti_tool_consumer_instance_guid', models.CharField(max_length=1024, null=True)), ('api_course_id', models.IntegerField(null=True)), ('created', models.DateTimeField(auto_now_add=True)), ('updated', models.DateTimeField(auto_now=True)), ], options={ 'verbose_name': 'course', 'verbose_name_plural': 'courses', }, ), migrations.AlterUniqueTogether( name='course', unique_together=set([('lti_context_id', 'lti_tool_consumer_instance_guid')]), ), ]
Mark initial migration so django knows to skip it.
Mark initial migration so django knows to skip it.
Python
bsd-3-clause
Harvard-ATG/media_management_lti,Harvard-ATG/media_management_lti,Harvard-ATG/media_management_lti,Harvard-ATG/media_management_lti
--- +++ @@ -5,6 +5,7 @@ class Migration(migrations.Migration): + initial = True dependencies = [ ]
5e6f2828ec36a57a46f8220cc9263b643792573b
ereuse_devicehub/scripts/updates/snapshot_software.py
ereuse_devicehub/scripts/updates/snapshot_software.py
from pydash import find from ereuse_devicehub.resources.device.domain import DeviceDomain from ereuse_devicehub.resources.event.device import DeviceEventDomain from ereuse_devicehub.scripts.updates.update import Update class SnapshotSoftware(Update): """ Changes the values of SnapshotSoftware and adds it to the materialized one in devices """ def execute(self, database): SNAPSHOT_SOFTWARE = { 'DDI': 'Workbench', 'Scan': 'AndroidApp', 'DeviceHubClient': 'Web' } for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}): snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot['snapshotSoftware']] DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}}) for device in DeviceDomain.get({'events._id': snapshot['_id']}): materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id']) materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware'] DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
from pydash import find from ereuse_devicehub.resources.device.domain import DeviceDomain from ereuse_devicehub.resources.event.device import DeviceEventDomain from ereuse_devicehub.scripts.updates.update import Update class SnapshotSoftware(Update): """ Changes the values of SnapshotSoftware and adds it to the materialized one in devices """ def execute(self, database): SNAPSHOT_SOFTWARE = { 'DDI': 'Workbench', 'Scan': 'AndroidApp', 'DeviceHubClient': 'Web' } for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}): snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')] DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}}) for device in DeviceDomain.get({'events._id': snapshot['_id']}): materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id']) materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware'] DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
Fix getting snapshotsoftware on old snapshots
Fix getting snapshotsoftware on old snapshots
Python
agpl-3.0
eReuse/DeviceHub,eReuse/DeviceHub
--- +++ @@ -17,7 +17,7 @@ 'DeviceHubClient': 'Web' } for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}): - snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot['snapshotSoftware']] + snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')] DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}}) for device in DeviceDomain.get({'events._id': snapshot['_id']}): materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
0612ea6aea5a10e5639a710500c321e3c9e02495
interfaces/python/setup.py
interfaces/python/setup.py
#!/usr/bin/env python """ setup.py file for compiling Infomap module """ from distutils.core import setup, Extension import fnmatch import os import re cppSources = [] for root, dirnames, filenames in os.walk('.'): if root == 'src': cppSources.append(os.path.join(root, 'Infomap.cpp')) else: for filename in fnmatch.filter(filenames, '*.cpp'): cppSources.append(os.path.join(root, filename)) # Extract Infomap version infomapVersion = '' with open(os.path.join('src', 'io', 'version.cpp')) as f: for line in f: m = re.match( r'.+INFOMAP_VERSION = \"(.+)\"', line) if m: infomapVersion = m.groups()[0] infomap_module = Extension('_infomap', sources=cppSources, extra_compile_args=['-DAS_LIB'] ) setup (name = 'infomap', version = infomapVersion, author = "Team at mapequation.org", description = """Infomap clustering algorithm""", url = "www.mapequation.org", ext_modules = [infomap_module], py_modules = ["infomap"], )
#!/usr/bin/env python """ setup.py file for compiling Infomap module """ from distutils.core import setup, Extension from distutils.file_util import copy_file import sysconfig import fnmatch import os import re cppSources = [] for root, dirnames, filenames in os.walk('.'): if root == 'src': cppSources.append(os.path.join(root, 'Infomap.cpp')) else: for filename in fnmatch.filter(filenames, '*.cpp'): cppSources.append(os.path.join(root, filename)) # Extract Infomap version infomapVersion = '' with open(os.path.join('src', 'io', 'version.cpp')) as f: for line in f: m = re.match( r'.+INFOMAP_VERSION = \"(.+)\"', line) if m: infomapVersion = m.groups()[0] infomap_module = Extension('_infomap', sources=cppSources, extra_compile_args=['-DAS_LIB'] ) setup (name = 'infomap', version = infomapVersion, author = "Team at mapequation.org", description = """Infomap clustering algorithm""", url = "www.mapequation.org", ext_modules = [infomap_module], py_modules = ["infomap"], ) # Clean ABI Version Tagged .so Files libFilename = '_infomap{}'.format(sysconfig.get_config_var('EXT_SUFFIX')) copy_file(libFilename, '_infomap.so')
Fix python library problem due to ABI tagged .so files
Fix python library problem due to ABI tagged .so files
Python
agpl-3.0
mapequation/infomap,mapequation/infomap,mapequation/infomap,mapequation/infomap
--- +++ @@ -5,6 +5,8 @@ """ from distutils.core import setup, Extension +from distutils.file_util import copy_file +import sysconfig import fnmatch import os import re @@ -36,3 +38,7 @@ ext_modules = [infomap_module], py_modules = ["infomap"], ) + +# Clean ABI Version Tagged .so Files +libFilename = '_infomap{}'.format(sysconfig.get_config_var('EXT_SUFFIX')) +copy_file(libFilename, '_infomap.so')
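The workaround copies the ABI-tagged extension (e.g. _infomap.cpython-XY-platform.so) to a plain _infomap.so, using sysconfig to discover the tag. A small sketch of how that suffix is obtained; the module name is the one from this commit, and the 'SO' fallback for very old interpreters is an assumption:

import sysconfig

# Typically something like '.cpython-311-x86_64-linux-gnu.so' on CPython 3;
# very old interpreters expose the value under 'SO' rather than 'EXT_SUFFIX'.
suffix = sysconfig.get_config_var('EXT_SUFFIX') or sysconfig.get_config_var('SO')
tagged_name = '_infomap{}'.format(suffix)
print(tagged_name, '->', '_infomap.so')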
c7030e461026e718c46b86dadecc9681d226c27c
cupy/util.py
cupy/util.py
import atexit import functools from cupy import cuda _memoized_funcs = [] def memoize(for_each_device=False): """Makes a function memoizing the result for each argument and device. This decorator provides automatic memoization of the function result. Args: for_each_device (bool): If True, it memoizes the results for each device. Otherwise, it memoizes the results only based on the arguments. """ def decorator(f): global _memoized_funcs f._cupy_memo = {} _memoized_funcs.append(f) @functools.wraps(f) def ret(*args, **kwargs): arg_key = (args, frozenset(kwargs.items())) if for_each_device: arg_key = (cuda.Device().id, arg_key) memo = f._cupy_memo result = memo.get(arg_key, None) if result is None: result = f(*args, **kwargs) memo[arg_key] = result return result return ret return decorator @atexit.register def clear_memo(): """Clears the memoized results for all functions decorated by memoize.""" global _memoized_funcs for f in _memoized_funcs: del f._cupy_memo _memoized_funcs = []
import atexit import functools from cupy import cuda _memos = [] def memoize(for_each_device=False): """Makes a function memoizing the result for each argument and device. This decorator provides automatic memoization of the function result. Args: for_each_device (bool): If True, it memoizes the results for each device. Otherwise, it memoizes the results only based on the arguments. """ def decorator(f): memo = {} _memos.append(memo) @functools.wraps(f) def ret(*args, **kwargs): arg_key = (args, frozenset(kwargs.items())) if for_each_device: arg_key = (cuda.Device().id, arg_key) result = memo.get(arg_key, None) if result is None: result = f(*args, **kwargs) memo[arg_key] = result return result return ret return decorator @atexit.register def clear_memo(): """Clears the memoized results for all functions decorated by memoize.""" for memo in _memos: memo.clear()
Fix unintended late finalization of memoized functions
Fix unintended late finalization of memoized functions
Python
mit
ktnyt/chainer,niboshi/chainer,niboshi/chainer,laysakura/chainer,tscohen/chainer,benob/chainer,chainer/chainer,cupy/cupy,aonotas/chainer,cupy/cupy,jnishi/chainer,cupy/cupy,tkerola/chainer,cemoody/chainer,ktnyt/chainer,chainer/chainer,jnishi/chainer,jnishi/chainer,keisuke-umezawa/chainer,truongdq/chainer,kashif/chainer,wkentaro/chainer,ronekko/chainer,pfnet/chainer,delta2323/chainer,okuta/chainer,cupy/cupy,muupan/chainer,okuta/chainer,ysekky/chainer,hvy/chainer,chainer/chainer,truongdq/chainer,jnishi/chainer,keisuke-umezawa/chainer,t-abe/chainer,chainer/chainer,wkentaro/chainer,tigerneil/chainer,AlpacaDB/chainer,okuta/chainer,wkentaro/chainer,hvy/chainer,niboshi/chainer,ytoyama/yans_chainer_hackathon,sinhrks/chainer,keisuke-umezawa/chainer,ktnyt/chainer,kikusu/chainer,niboshi/chainer,rezoo/chainer,hvy/chainer,benob/chainer,sinhrks/chainer,anaruse/chainer,1986ks/chainer,ktnyt/chainer,sou81821/chainer,kikusu/chainer,okuta/chainer,kiyukuta/chainer,wkentaro/chainer,muupan/chainer,t-abe/chainer,AlpacaDB/chainer,Kaisuke5/chainer,minhpqn/chainer,hvy/chainer,keisuke-umezawa/chainer
--- +++ @@ -4,7 +4,7 @@ from cupy import cuda -_memoized_funcs = [] +_memos = [] def memoize(for_each_device=False): @@ -19,9 +19,8 @@ """ def decorator(f): - global _memoized_funcs - f._cupy_memo = {} - _memoized_funcs.append(f) + memo = {} + _memos.append(memo) @functools.wraps(f) def ret(*args, **kwargs): @@ -29,7 +28,6 @@ if for_each_device: arg_key = (cuda.Device().id, arg_key) - memo = f._cupy_memo result = memo.get(arg_key, None) if result is None: result = f(*args, **kwargs) @@ -44,7 +42,5 @@ @atexit.register def clear_memo(): """Clears the memoized results for all functions decorated by memoize.""" - global _memoized_funcs - for f in _memoized_funcs: - del f._cupy_memo - _memoized_funcs = [] + for memo in _memos: + memo.clear()
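The key change is that each memo dictionary now lives in the decorator's closure and in a module-level list, rather than as an attribute on the wrapped function, so clearing at exit cannot trip over a missing attribute. A stripped-down sketch of the same pattern without the per-device key handling:

import functools

_memos = []

def memoize(f):
    memo = {}
    _memos.append(memo)               # held in a module-level list so it can be cleared later

    @functools.wraps(f)
    def wrapper(*args):
        if args not in memo:
            memo[args] = f(*args)
        return memo[args]
    return wrapper

def clear_memo():
    for memo in _memos:
        memo.clear()                  # clearing a dict is always safe, unlike deleting an attribute

@memoize
def square(x):
    return x * x

print(square(3), square(3))           # second call comes from the memo
clear_memo()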
4aec042f4a271725451825bf03d48beac000ec9c
core/urls.py
core/urls.py
from django.conf.urls import url import core.views urlpatterns = [ url(r'^u/(?P<slug>[\w-\.]+)/?$', core.views.run_fn, name="run_fn"), ]
from django.conf.urls import url import core.views urlpatterns = [ url(r'^u/(?P<slug>[\w\-\+\.]+)/?$', core.views.run_fn, name="run_fn"), ]
Add more patterns to be handled in the slug
Add more patterns to be handled in the slug
Python
mit
theju/urlscript
--- +++ @@ -3,5 +3,5 @@ import core.views urlpatterns = [ - url(r'^u/(?P<slug>[\w-\.]+)/?$', core.views.run_fn, name="run_fn"), + url(r'^u/(?P<slug>[\w\-\+\.]+)/?$', core.views.run_fn, name="run_fn"), ]
b8a3fcac063436b4b6e402c9026c03f3e094c14a
docs/conf.py
docs/conf.py
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from sphinx_celery import conf globals().update(conf.build_config( 'kombu', __file__, project='Kombu', version_dev='4.3', version_stable='4.2', canonical_url='https://kombu.readthedocs.io/', webdomain='kombu.readthedocs.io', github_project='celery/kombu', author='Ask Solem & contributors', author_name='Ask Solem', copyright='2009-2016', publisher='Celery Project', html_logo='images/kombusmall.jpg', html_favicon='images/favicon.ico', html_prepend_sidebars=['sidebardonations.html'], extra_extensions=['sphinx.ext.napoleon'], apicheck_ignore_modules=[ 'kombu.entity', 'kombu.messaging', 'kombu.asynchronous.aws.ext', 'kombu.asynchronous.aws.sqs.ext', 'kombu.transport.qpid_patches', 'kombu.utils', 'kombu.transport.virtual.base', ], ))
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from sphinx_celery import conf globals().update(conf.build_config( 'kombu', __file__, project='Kombu', version_dev='4.4', version_stable='4.3', canonical_url='https://kombu.readthedocs.io/', webdomain='kombu.readthedocs.io', github_project='celery/kombu', author='Ask Solem & contributors', author_name='Ask Solem', copyright='2009-2019', publisher='Celery Project', html_logo='images/kombusmall.jpg', html_favicon='images/favicon.ico', html_prepend_sidebars=['sidebardonations.html'], extra_extensions=['sphinx.ext.napoleon'], apicheck_ignore_modules=[ 'kombu.entity', 'kombu.messaging', 'kombu.asynchronous.aws.ext', 'kombu.asynchronous.aws.sqs.ext', 'kombu.transport.qpid_patches', 'kombu.utils', 'kombu.transport.virtual.base', ], ))
Update version and copyright year.
Update version and copyright year.
Python
bsd-3-clause
ZoranPavlovic/kombu,celery/kombu,ZoranPavlovic/kombu,urbn/kombu
--- +++ @@ -6,14 +6,14 @@ globals().update(conf.build_config( 'kombu', __file__, project='Kombu', - version_dev='4.3', - version_stable='4.2', + version_dev='4.4', + version_stable='4.3', canonical_url='https://kombu.readthedocs.io/', webdomain='kombu.readthedocs.io', github_project='celery/kombu', author='Ask Solem & contributors', author_name='Ask Solem', - copyright='2009-2016', + copyright='2009-2019', publisher='Celery Project', html_logo='images/kombusmall.jpg', html_favicon='images/favicon.ico',
c4fc5e1f89d756cdd9a7fe19baa04aeae844a26c
sip/execution_control/master_controller/rest/tests/test.py
sip/execution_control/master_controller/rest/tests/test.py
# -*- coding: utf-8 -*- """Unit tests for the Master Controller REST variant. - http://flask.pocoo.org/docs/0.12/testing/ """ import unittest import json from app.app import APP class MasterControllerTests(unittest.TestCase): """Tests of the Master Controller""" def setUp(self): """Executed prior to each test.""" APP.config['TESTING'] = True APP.config['DEBUG'] = False APP.config['JSONIFY_PRETTYPRINT_REGULAR'] = False self.app = APP.test_client() self.assertEqual(APP.debug, False) def tearDown(self): """Executed after each test.""" pass def test_get_state_successful(self): """Test of successfully returning the SDP state.""" states = ['OFF', 'INIT', 'STANDBY', 'ON', 'DISABLE', 'FAULT', 'ALARM', 'UNKNOWN'] response = self.app.get('/state') self.assertEqual(response.mimetype, 'application/json') self.assertEqual(response.status_code, 200) data = json.loads(response.get_data()) self.assertTrue(data['state'] in states)
# -*- coding: utf-8 -*- """Unit tests for the Master Controller REST variant. - http://flask.pocoo.org/docs/0.12/testing/ """ import unittest import json from app.app import APP class MasterControllerTests(unittest.TestCase): """Tests of the Master Controller""" def setUp(self): """Executed prior to each test.""" APP.config['TESTING'] = True APP.config['DEBUG'] = False APP.config['JSONIFY_PRETTYPRINT_REGULAR'] = False self.app = APP.test_client() self.assertEqual(APP.debug, False) def tearDown(self): """Executed after each test.""" pass def test_get_state_successful(self): """Test of successfully returning the SDP state.""" states = ['OFF', 'INIT', 'STANDBY', 'ON', 'DISABLE', 'FAULT', 'ALARM', 'UNKNOWN'] response = self.app.get('/state') self.assertEqual(response.mimetype, 'application/json') self.assertEqual(response.status_code, 200) data = json.loads(response.get_data().decode('utf-8')) self.assertTrue(data['state'] in states)
Convert reply from bytes to string
Convert reply from bytes to string The GET message reply is a byte string and it needs to be converted to a string before being passed to the JSON parser.
Python
bsd-3-clause
SKA-ScienceDataProcessor/integration-prototype,SKA-ScienceDataProcessor/integration-prototype,SKA-ScienceDataProcessor/integration-prototype,SKA-ScienceDataProcessor/integration-prototype
--- +++ @@ -32,5 +32,5 @@ self.assertEqual(response.mimetype, 'application/json') self.assertEqual(response.status_code, 200) - data = json.loads(response.get_data()) + data = json.loads(response.get_data().decode('utf-8')) self.assertTrue(data['state'] in states)
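The reasoning in the commit message is that Flask's test client returns the response body as bytes, while older Python 3 versions of json.loads() only accept str. A tiny illustration of the decode step, independent of the Flask test above:

import json

raw = b'{"state": "ON"}'                       # what the test client's get_data() hands back
data = json.loads(raw.decode('utf-8'))         # decode first; older json.loads versions reject bytes
print(data['state'])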
fb8a70c16bb836405f2f811a203955db28f01d04
tests/matchers/test_contain.py
tests/matchers/test_contain.py
import unittest from robber import expect from robber.matchers.contain import Contain class TestAbove(unittest.TestCase): def test_matches(self): expect(Contain({'key': 'value'}, 'key').matches()) == True expect(Contain([1, 2, 3], 2).matches()) == True expect(Contain((1, 2, 3), 3).matches()) == True expect(Contain({'key': 'value'}, 'other').matches()) == False expect(Contain([1, 2, 3], 4).matches()) == False expect(Contain((1, 2, 3), 4).matches()) == False def test_failure_message(self): contain = Contain([1, 2, 3], 4) expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3]) def test_register(self): expect(expect.matcher('contain')) == Contain
import unittest from robber import expect from robber.matchers.contain import Contain class TestAbove(unittest.TestCase): def test_matches(self): expect(Contain({'key': 'value'}, 'key').matches()) == True expect(Contain([1, 2, 3], 2).matches()) == True expect(Contain((1, 2, 3), 3).matches()) == True expect(Contain({'key': 'value'}, 'other').matches()) == False expect(Contain([1, 2, 3], 4).matches()) == False expect(Contain((1, 2, 3), 4).matches()) == False def test_failure_message(self): contain = Contain([1, 2, 3], 4) expect(contain.failure_message()) == 'Expected {0} to contain 4'.format([1, 2, 3]) def test_register(self): expect(expect.matcher('contain')) == Contain
Add field number in string format for Python 2.6 compatibility
Add field number in string format for Python 2.6 compatibility
Python
mit
taoenator/robber.py,vesln/robber.py
--- +++ @@ -14,7 +14,7 @@ def test_failure_message(self): contain = Contain([1, 2, 3], 4) - expect(contain.failure_message()) == 'Expected {} to contain 4'.format([1, 2, 3]) + expect(contain.failure_message()) == 'Expected {0} to contain 4'.format([1, 2, 3]) def test_register(self): expect(expect.matcher('contain')) == Contain
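The change matters because str.format() on Python 2.6 does not support auto-numbered '{}' fields; the index must be written out. A quick demonstration with arbitrary values:

items = [1, 2, 3]

# Explicit field index: valid on Python 2.6 and everything newer.
print('Expected {0} to contain 4'.format(items))

# Auto-numbered '{}' fields only work on Python 2.7+ / 3.x.
print('Expected {} to contain 4'.format(items))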
88e680302b6880c17111a678440156ac104507ed
aleph/authz.py
aleph/authz.py
from flask import request from flask.ext.login import current_user from werkzeug.exceptions import Forbidden def request_collections(action): try: return request.collection_slugs.get(action) except: return [] def collection_read(name): return name in request_collections('read') def collection_write(name): return name in request_collections('write') def logged_in(): return current_user.is_authenticated() def require(pred): if not pred: raise Forbidden("Sorry, you're not permitted to do this!")
from flask import request from flask.ext.login import current_user from werkzeug.exceptions import Forbidden def request_collections(action): try: return request.collection_slugs.get(action) or [] except: return [] def collection_read(name): return name in request_collections('read') def collection_write(name): return name in request_collections('write') def logged_in(): return current_user.is_authenticated() def require(pred): if not pred: raise Forbidden("Sorry, you're not permitted to do this!")
Return empty list for non-logged in users.
Return empty list for non-logged in users.
Python
mit
smmbllsm/aleph,gazeti/aleph,gazeti/aleph,nightsh/aleph,gazeti/aleph,Luthien123/aleph,gazeti/aleph,nightsh/aleph,OpenGazettes/aleph,mgax/aleph,smmbllsm/aleph,mgax/aleph,alephdata/aleph,alephdata/aleph,pudo/aleph,smmbllsm/aleph,alephdata/aleph,OpenGazettes/aleph,alephdata/aleph,OpenGazettes/aleph,Luthien123/aleph,pudo/aleph,OpenGazettes/aleph,nightsh/aleph,Luthien123/aleph,pudo/aleph,alephdata/aleph,mgax/aleph
--- +++ @@ -5,7 +5,7 @@ def request_collections(action): try: - return request.collection_slugs.get(action) + return request.collection_slugs.get(action) or [] except: return []
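For anonymous requests the slug lookup can return None, and the `or []` turns that into an empty list so membership checks still work. A minimal illustration of the idiom with a made-up permission map:

slugs = {'read': ['public-docs']}              # hypothetical per-request permission map

for action in ('read', 'write'):
    names = slugs.get(action) or []            # a missing key (None) collapses to an empty list
    print(action, 'public-docs' in names)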
447a9c82e229eb801df028d2531764d89b28c923
genderbot.py
genderbot.py
import random import re from twitterbot import TwitterBot import wikipedia class Genderbot(TwitterBot): boring_article_regex = (r"municipality|village|town|football|genus|family|" "administrative|district|community|region|hamlet|" "school|actor|mountain|basketball|city|species|film|" "county|located|politician|professional|settlement") def tweet(self): article = self.__random_wikipedia_article() match = re.search(r"\bis [^.?]+", article.content, re.UNICODE) if match: status = self.__format_status(match.group(0), article.url) if self.__is_interesting(status): self.post_tweet(status) def __format_status(self, is_phrase, url): status = 'gender %s' % (is_phrase) if len(status) > 114: status = status[0:113] + '...' return status + ' %s' % (url) def __is_interesting(self, status): boring_match = re.search(Genderbot.boring_article_regex, status, re.UNICODE) return boring_match is None def __random_wikipedia_article(self): random_title = wikipedia.random(pages=1) return wikipedia.page(title=random_title) if __name__ == "__main__": try: Genderbot("CustomGender").tweet() except: pass
import re from twitterbot import TwitterBot import wikipedia class Genderbot(TwitterBot): boring_article_regex = (r"municipality|village|town|football|genus|family|" "administrative|district|community|region|hamlet|" "school|actor|mountain|basketball|city|species|film|" "county|located|politician|professional|settlement") def tweet(self): article = self.__random_wikipedia_article() match = re.search(r"\bis [^.?]+", article.content, re.UNICODE) if match: status = self.__format_status(match.group(0), article.url) if self.__is_interesting(status): self.post_tweet(status) def __format_status(self, is_phrase, url): status = 'gender %s' % (is_phrase) if len(status) > 114: status = status[0:113] + '...' return status + ' %s' % (url) def __is_interesting(self, status): boring_match = re.search(Genderbot.boring_article_regex, status, re.UNICODE) return boring_match is None def __random_wikipedia_article(self): random_title = wikipedia.random(pages=1) return wikipedia.page(title=random_title) if __name__ == "__main__": try: Genderbot("CustomGender").tweet() except: pass
Remove import random now that it's not being used
Remove import random now that it's not being used
Python
mit
DanielleSucher/genderbot
--- +++ @@ -1,4 +1,3 @@ -import random import re from twitterbot import TwitterBot import wikipedia
02b1611f26f5d6e572e4c818eab8ae4d9bcbcf2f
test/mac/gyptest-rebuild.py
test/mac/gyptest-rebuild.py
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Verifies that app bundles are rebuilt correctly. """ import TestGyp import os import sys if sys.platform == 'darwin': test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) test.run_gyp('test.gyp', chdir='app-bundle') test.build('test.gyp', test.ALL, chdir='app-bundle') # Touch a source file, rebuild, and check that the app target is up-to-date. os.utime('app-bundle/TestApp/main.m', None) test.build('test.gyp', test.ALL, chdir='app-bundle') test.up_to_date('test.gyp', test.ALL, chdir='app-bundle') test.pass_test()
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Verifies that app bundles are rebuilt correctly. """ import TestGyp import os import sys if sys.platform == 'darwin': test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode']) test.run_gyp('test.gyp', chdir='app-bundle') test.build('test.gyp', test.ALL, chdir='app-bundle') # Touch a source file, rebuild, and check that the app target is up-to-date. test.touch('app-bundle/TestApp/main.m', None) test.build('test.gyp', test.ALL, chdir='app-bundle') test.up_to_date('test.gyp', test.ALL, chdir='app-bundle') test.pass_test()
Use test.touch() instead of os.utime() in a test.
Use test.touch() instead of os.utime() in a test. No intended functionality change. TBR=evan Review URL: https://chromiumcodereview.appspot.com/9234034
Python
bsd-3-clause
witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp
--- +++ @@ -21,7 +21,7 @@ test.build('test.gyp', test.ALL, chdir='app-bundle') # Touch a source file, rebuild, and check that the app target is up-to-date. - os.utime('app-bundle/TestApp/main.m', None) + test.touch('app-bundle/TestApp/main.m', None) test.build('test.gyp', test.ALL, chdir='app-bundle') test.up_to_date('test.gyp', test.ALL, chdir='app-bundle')
3c8d3bfea2ef1c82a62ef1b7455c29c044c7cfa3
ensure_zero_padding_in_numbering_of_files.py
ensure_zero_padding_in_numbering_of_files.py
import argparse import os import re import sys def main(cmdline): parser = argparse.ArgumentParser( description='Ensure zero padding in numbering of files.') parser.add_argument('path', type=str, help='path to the directory containing the files') args = parser.parse_args() path = args.path numbered = re.compile(r'(.*?)(\d+)\.(.*)') numbered_fnames = [fname for fname in os.listdir(path) if numbered.search(fname)] max_digits = max(len(numbered.search(fname).group(2)) for fname in numbered_fnames) for fname in numbered_fnames: _, prefix, num, ext, _ = numbered.split(fname, maxsplit=1) num = num.zfill(max_digits) new_fname = "{}{}.{}".format(prefix, num, ext) if fname != new_fname: os.rename(os.path.join(path, fname), os.path.join(path, new_fname)) print "Renamed {} to {}".format(fname, new_fname) else: print "{} seems fine".format(fname) if __name__ == "__main__": sys.exit(main(sys.argv[1:]))
#!/usr/bin/env python3 import argparse import os import re import sys def main(): parser = argparse.ArgumentParser( description='Ensure zero padding in numbering of files.') parser.add_argument( 'path', type=str, help='path to the directory containing the files') args = parser.parse_args() path = args.path numbered = re.compile(r'(.*?)(\d+)\.(.*)') numbered_fnames = [fname for fname in os.listdir(path) if numbered.search(fname)] max_digits = max(len(numbered.search(fname).group(2)) for fname in numbered_fnames) for fname in numbered_fnames: _, prefix, num, ext, _ = numbered.split(fname, maxsplit=1) num = num.zfill(max_digits) new_fname = "{}{}.{}".format(prefix, num, ext) if fname != new_fname: os.rename( os.path.join(path, fname), os.path.join(path, new_fname)) print("Renamed {} to {}".format(fname, new_fname)) else: print("{} seems fine".format(fname)) if __name__ == "__main__": sys.exit(main())
Convert to py3, fix style issues (flake8, pylint).
Convert to py3, fix style issues (flake8, pylint).
Python
mit
dwinston/cli-utils
--- +++ @@ -1,13 +1,18 @@ +#!/usr/bin/env python3 + import argparse import os import re import sys -def main(cmdline): + +def main(): parser = argparse.ArgumentParser( description='Ensure zero padding in numbering of files.') - parser.add_argument('path', type=str, + parser.add_argument( + 'path', + type=str, help='path to the directory containing the files') args = parser.parse_args() path = args.path @@ -21,15 +26,16 @@ for fname in numbered_fnames) for fname in numbered_fnames: - _, prefix, num, ext, _ = numbered.split(fname, maxsplit=1) + _, prefix, num, ext, _ = numbered.split(fname, maxsplit=1) num = num.zfill(max_digits) new_fname = "{}{}.{}".format(prefix, num, ext) if fname != new_fname: - os.rename(os.path.join(path, fname), os.path.join(path, new_fname)) - print "Renamed {} to {}".format(fname, new_fname) + os.rename( + os.path.join(path, fname), + os.path.join(path, new_fname)) + print("Renamed {} to {}".format(fname, new_fname)) else: - print "{} seems fine".format(fname) + print("{} seems fine".format(fname)) if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) - + sys.exit(main())
a3d528d58ca3f5df291400a808950661820feb05
RPi/GPIO/definitions/functions/common.py
RPi/GPIO/definitions/functions/common.py
def cleanup(channel_or_chan_list): pass def gpio_function(pin): pass
def cleanup(channel_or_chan_list=None): pass def gpio_function(pin): pass
Make channel an optional parameter
Make channel an optional parameter RPi.GPIO supports calling cleanup() without parameters to clean up all GPIOs used in the session. By adding "=None", warnings about missing parameters are avoided.
Python
mit
Def4l71diot/RPi.GPIO-def
--- +++ @@ -1,4 +1,4 @@ -def cleanup(channel_or_chan_list): +def cleanup(channel_or_chan_list=None): pass
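Giving channel_or_chan_list a default of None mirrors RPi.GPIO, where GPIO.cleanup() may be called with no arguments to release every channel used in the session. A small sketch of that signature pattern; the print statements are only illustrative:

def cleanup(channel_or_chan_list=None):
    if channel_or_chan_list is None:
        print('cleaning up every channel used in this session')
    else:
        print('cleaning up', channel_or_chan_list)

cleanup()           # now legal, matching RPi.GPIO's no-argument form
cleanup([17, 27])   # an explicit channel list still works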
eadf481f352e4277001f3b9e83c7ffbbd58c789c
openstack/tests/functional/network/v2/test_service_provider.py
openstack/tests/functional/network/v2/test_service_provider.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six from openstack.tests.functional import base class TestServiceProvider(base.BaseFunctionalTest): def test_list(self): providers = list(self.conn.network.service_providers()) for provide in providers: self.assertIsInstance(provide.name, six.string_type) self.assertIsInstance(provide.service_type, six.string_types)
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.tests.functional import base class TestServiceProvider(base.BaseFunctionalTest): def test_list(self): providers = list(self.conn.network.service_providers()) names = [o.name for o in providers] service_types = [o.service_type for o in providers] self.assertIn('ha', names) self.assertIn('L3_ROUTER_NAT', service_types)
Fix the network service provider test
Fix the network service provider test This test was pretty lame before since it just verified that the result was a string. Now it verifies that at least one service provider exists and I think I picked one that should be around for a while. The test failure message also prints the list of providers now so it should be easier to debug. Partial-bug: #1665495 Change-Id: Ief96558770d81dca81091e235b8d370883b14b94
Python
apache-2.0
stackforge/python-openstacksdk,briancurtin/python-openstacksdk,dtroyer/python-openstacksdk,dtroyer/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,briancurtin/python-openstacksdk,openstack/python-openstacksdk
--- +++ @@ -10,15 +10,13 @@ # License for the specific language governing permissions and limitations # under the License. -import six - from openstack.tests.functional import base class TestServiceProvider(base.BaseFunctionalTest): def test_list(self): providers = list(self.conn.network.service_providers()) - - for provide in providers: - self.assertIsInstance(provide.name, six.string_type) - self.assertIsInstance(provide.service_type, six.string_types) + names = [o.name for o in providers] + service_types = [o.service_type for o in providers] + self.assertIn('ha', names) + self.assertIn('L3_ROUTER_NAT', service_types)
644e4301d0a73756750a48226b9db00e51a9e46c
fmproject/urls.py
fmproject/urls.py
"""ssoproject URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from sso import views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^$', views.main, name='main'), url(r'^signin$', views.signin, name='signin'), url(r'^signout$', views.signout, name='signout'), url(r'^signup$', views.signup, name='signup'), url(r'^verify$', views.verify, name='verify'), url(r'^welcome$', views.welcome, name='welcome'), ]
"""ssoproject URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from sso import views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^$', views.main, name='main'), url(r'^signin$', views.signin, name='signin'), url(r'^signout$', views.signout, name='signout'), url(r'^signup$', views.signup, name='signup'), url(r'^verify$', views.verify, name='verify'), url(r'^welcome$', views.welcome, name='welcome'), url(r'^callback/github$', views.auth_with_github, name='auth with github'), ]
Add url mapping for github callback
Add url mapping for github callback
Python
mit
favoritemedium/sso-prototype,favoritemedium/sso-prototype
--- +++ @@ -26,4 +26,5 @@ url(r'^signup$', views.signup, name='signup'), url(r'^verify$', views.verify, name='verify'), url(r'^welcome$', views.welcome, name='welcome'), + url(r'^callback/github$', views.auth_with_github, name='auth with github'), ]
9275d654f7ce62cc3f9e7e6f9eea537bb295c608
lily/contacts/urls.py
lily/contacts/urls.py
from django.conf.urls import patterns, url from lily.contacts.views import (JsonContactListView, AddContactView, EditContactView, DetailContactView, DeleteContactView, ListContactView, ExportContactView) urlpatterns = patterns('', url(r'^add/$', AddContactView.as_view(), name='contact_add'), url(r'^add/from_account/(?P<account_pk>[\w-]+)/$', AddContactView.as_view(), name='contact_add'), url(r'^edit/(?P<pk>[\w-]+)/$', EditContactView.as_view(), name='contact_edit'), url(r'^details/(?P<pk>[\w-]+)/$', DetailContactView.as_view(), name='contact_details'), url(r'^delete/xhr/(?P<pk>[\w-]+)/$', DeleteContactView.as_view(), name='contact_delete'), url(r'^json_list/$', JsonContactListView.as_view(), name='json_contact_list'), url(r'^export/$', ExportContactView.as_view(), name='contact_export'), url(r'^tag/(?P<tag>[\w-]+)/$', ListContactView.as_view(), name='contact_list_filtered_by_tag'), url(r'^(?P<b36_pks>[\w;]*)/$', ListContactView.as_view(), name='contact_list_filtered'), url(r'^$', ListContactView.as_view(), name='contact_list'), )
from django.conf.urls import patterns, url from lily.contacts.views import (JsonContactListView, AddContactView, EditContactView, DetailContactView, DeleteContactView, ListContactView, ExportContactView) urlpatterns = patterns('', url(r'^add/$', AddContactView.as_view(), name='contact_add'), url(r'^add/from_account/(?P<account_pk>[\w-]+)/$', AddContactView.as_view(), name='contact_add'), url(r'^edit/(?P<pk>[\w-]+)/$', EditContactView.as_view(), name='contact_edit'), url(r'^details/(?P<pk>[\w-]+)/$', DetailContactView.as_view(), name='contact_details'), url(r'^delete/(?P<pk>[\w-]+)/$', DeleteContactView.as_view(), name='contact_delete'), url(r'^json_list/$', JsonContactListView.as_view(), name='json_contact_list'), url(r'^export/$', ExportContactView.as_view(), name='contact_export'), url(r'^tag/(?P<tag>[\w-]+)/$', ListContactView.as_view(), name='contact_list_filtered_by_tag'), url(r'^(?P<b36_pks>[\w;]*)/$', ListContactView.as_view(), name='contact_list_filtered'), url(r'^$', ListContactView.as_view(), name='contact_list'), )
Fix not being able to delete contacts
Fix not being able to delete contacts
Python
agpl-3.0
HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily
--- +++ @@ -9,7 +9,7 @@ url(r'^add/from_account/(?P<account_pk>[\w-]+)/$', AddContactView.as_view(), name='contact_add'), url(r'^edit/(?P<pk>[\w-]+)/$', EditContactView.as_view(), name='contact_edit'), url(r'^details/(?P<pk>[\w-]+)/$', DetailContactView.as_view(), name='contact_details'), - url(r'^delete/xhr/(?P<pk>[\w-]+)/$', DeleteContactView.as_view(), name='contact_delete'), + url(r'^delete/(?P<pk>[\w-]+)/$', DeleteContactView.as_view(), name='contact_delete'), url(r'^json_list/$', JsonContactListView.as_view(), name='json_contact_list'), url(r'^export/$', ExportContactView.as_view(), name='contact_export'), url(r'^tag/(?P<tag>[\w-]+)/$', ListContactView.as_view(), name='contact_list_filtered_by_tag'),
14762a73617007f0b65880e1d99cd5b47e03bfff
vimeo/auth/authorization_code.py
vimeo/auth/authorization_code.py
#! /usr/bin/env python # encoding: utf-8 from __future__ import absolute_import import urllib from .base import AuthenticationMixinBase from . import GrantFailed class AuthorizationCodeMixin(AuthenticationMixinBase): """Implement helpers for the Authorization Code grant for OAuth2.""" def auth_url(self, scope, redirect): """Get the url to direct a user to authenticate.""" url = self.API_ROOT + "/oauth/authorize?" query = { "response_type": "code", "client_id": self.app_info[0] } if scope: if not isinstance(scope, basestring): scope = ' '.join(scope) query['scope'] = scope if redirect: query['redirect_uri'] = redirect return url + urllib.urlencode(query) def exchange_code(self, code, redirect): """Perform the exchange step for the code from the redirected user.""" code, headers, resp = self.call_grant('/oauth/access_token', { "grant_type": "authorization_code", "code": code, "redirect_uri": redirect }) if not code == 200: raise GrantFailed() self.token = resp['access_token'] return self.token, resp['user'], resp['scope']
#! /usr/bin/env python # encoding: utf-8 from __future__ import absolute_import import urllib from .base import AuthenticationMixinBase from . import GrantFailed try: basestring except NameError: basestring = str class AuthorizationCodeMixin(AuthenticationMixinBase): """Implement helpers for the Authorization Code grant for OAuth2.""" def auth_url(self, scope, redirect): """Get the url to direct a user to authenticate.""" url = self.API_ROOT + "/oauth/authorize?" query = { "response_type": "code", "client_id": self.app_info[0] } if scope: if not isinstance(scope, basestring): scope = ' '.join(scope) query['scope'] = scope if redirect: query['redirect_uri'] = redirect return url + urllib.urlencode(query) def exchange_code(self, code, redirect): """Perform the exchange step for the code from the redirected user.""" code, headers, resp = self.call_grant('/oauth/access_token', { "grant_type": "authorization_code", "code": code, "redirect_uri": redirect }) if not code == 200: raise GrantFailed() self.token = resp['access_token'] return self.token, resp['user'], resp['scope']
Make basestring work in Python 3
Make basestring work in Python 3
Python
apache-2.0
vimeo/vimeo.py,greedo/vimeo.py,blorenz/vimeo.py,gabrielgisoldo/vimeo.py
--- +++ @@ -6,6 +6,11 @@ import urllib from .base import AuthenticationMixinBase from . import GrantFailed + +try: + basestring +except NameError: + basestring = str class AuthorizationCodeMixin(AuthenticationMixinBase): """Implement helpers for the Authorization Code grant for OAuth2."""
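The try/except NameError shim gives the module one name that means basestring on Python 2 and str on Python 3, so the isinstance() check in auth_url() keeps working on both. A self-contained version of the same shim with sample scope values:

try:
    basestring                  # defined on Python 2
except NameError:
    basestring = str            # Python 3: fall back to str

for value in ('public private', ['public', 'private']):
    # join list-style scopes into a single space-separated string
    scope = value if isinstance(value, basestring) else ' '.join(value)
    print(scope)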
fb5378d178f6c4104ac0464ebbf04ce84753c6e2
conf_site/settings/dev.py
conf_site/settings/dev.py
# Top settings file for development from .base import * # noqa: F403 from .secrets import * # noqa: F403 COMPRESS_ENABLED = False DEBUG = True TEMPLATE_DEBUG = DEBUG SERVE_MEDIA = DEBUG SITE_ID = 2 ALLOWED_HOSTS = ["localhost", "0.0.0.0"] DATABASES = { "default": DATABASES_DEFAULT, # noqa: F405 } MIDDLEWARE_CLASSES = [ "debug_toolbar.middleware.DebugToolbarMiddleware", ] + MIDDLEWARE_CLASSES # noqa: F405 INSTALLED_APPS += ["debug_toolbar", ] # noqa: F405 INTERNAL_IPS = "127.0.0.1" LOGGING["loggers"]["django.request"]["level"] = "DEBUG" # noqa: F405
# Top settings file for development from .base import * # noqa: F403 from .secrets import * # noqa: F403 COMPRESS_ENABLED = False DEBUG = True TEMPLATE_DEBUG = DEBUG SERVE_MEDIA = DEBUG SITE_ID = 2 ALLOWED_HOSTS = ["localhost", "0.0.0.0"] DATABASES = { "default": DATABASES_DEFAULT, # noqa: F405 } DEBUG_TOOLBAR_PANELS = [ "debug_toolbar.panels.timer.TimerPanel", "debug_toolbar.panels.settings.SettingsPanel", "debug_toolbar.panels.headers.HeadersPanel", "debug_toolbar.panels.request.RequestPanel", "debug_toolbar.panels.sql.SQLPanel", "debug_toolbar.panels.staticfiles.StaticFilesPanel", "debug_toolbar.panels.templates.TemplatesPanel", "debug_toolbar.panels.cache.CachePanel", "debug_toolbar.panels.signals.SignalsPanel", "debug_toolbar.panels.logging.LoggingPanel", "debug_toolbar.panels.redirects.RedirectsPanel", ] MIDDLEWARE_CLASSES = [ "debug_toolbar.middleware.DebugToolbarMiddleware", ] + MIDDLEWARE_CLASSES # noqa: F405 INSTALLED_APPS += ["debug_toolbar", ] # noqa: F405 INTERNAL_IPS = "127.0.0.1" LOGGING["loggers"]["django.request"]["level"] = "DEBUG" # noqa: F405
Remove "versions" panel from django-debug-toolbar.
Remove "versions" panel from django-debug-toolbar. The "Versions" panel of django-debug-toolbar is not compatible with wagtailmenus (see https://github.com/jazzband/django-debug-toolbar/issues/922 and https://github.com/rkhleics/wagtailmenus/issues/115).
Python
mit
pydata/conf_site,pydata/conf_site,pydata/conf_site
--- +++ @@ -14,6 +14,19 @@ "default": DATABASES_DEFAULT, # noqa: F405 } +DEBUG_TOOLBAR_PANELS = [ + "debug_toolbar.panels.timer.TimerPanel", + "debug_toolbar.panels.settings.SettingsPanel", + "debug_toolbar.panels.headers.HeadersPanel", + "debug_toolbar.panels.request.RequestPanel", + "debug_toolbar.panels.sql.SQLPanel", + "debug_toolbar.panels.staticfiles.StaticFilesPanel", + "debug_toolbar.panels.templates.TemplatesPanel", + "debug_toolbar.panels.cache.CachePanel", + "debug_toolbar.panels.signals.SignalsPanel", + "debug_toolbar.panels.logging.LoggingPanel", + "debug_toolbar.panels.redirects.RedirectsPanel", +] MIDDLEWARE_CLASSES = [ "debug_toolbar.middleware.DebugToolbarMiddleware", ] + MIDDLEWARE_CLASSES # noqa: F405
bb1f2ded652c7ec58251112d42e61b8d1232887d
docs/conf.py
docs/conf.py
import sys import os import shlex import subprocess read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True' if read_the_docs_build: subprocess.call('doxygen', shell=True) extensions = ['breathe'] breathe_projects = { 'Nanoshield_LoadCell': 'xml' } breathe_default_project = "Nanoshield_LoadCell" templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'Nanoshield_LoadCell' copyright = u'2015, Nanoshield_LoadCell' author = u'Nanoshield_LoadCell' version = '1.0' release = '1.0' language = None exclude_patterns = ['_build'] pygments_style = 'sphinx' todo_include_todos = False html_static_path = ['_static'] htmlhelp_basename = 'Nanoshield_LoadCelldoc' latex_elements = { } latex_documents = [ (master_doc, 'Nanoshield_LoadCell.tex', u'Nanoshield_LoadCell Documentation', u'Nanoshield_LoadCell', 'manual'), ]
import sys import os import shlex import subprocess read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True' if read_the_docs_build: subprocess.call('doxygen', shell=True) extensions = ['breathe'] breathe_projects = { 'Nanoshield_LoadCell': 'xml' } breathe_default_project = "Nanoshield_LoadCell" templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'Nanoshield_LoadCell' copyright = u'2015, Nanoshield_LoadCell' author = u'Nanoshield_LoadCell' version = '1.0' release = '1.0' language = None exclude_patterns = ['_build'] pygments_style = 'sphinx' todo_include_todos = False html_static_path = ['_static'] htmlhelp_basename = 'Nanoshield_LoadCelldoc' latex_elements = { } latex_documents = [ (master_doc, 'Nanoshield_LoadCell.tex', u'Nanoshield\_LoadCell Documentation', u'Nanoshield_LoadCell', 'manual'), ]
Fix library name in Latex.
Fix library name in Latex.
Python
mit
circuitar/Nanoshield_LoadCell,circuitar/Nanoshield_LoadCell
--- +++ @@ -28,6 +28,6 @@ latex_elements = { } latex_documents = [ - (master_doc, 'Nanoshield_LoadCell.tex', u'Nanoshield_LoadCell Documentation', + (master_doc, 'Nanoshield_LoadCell.tex', u'Nanoshield\_LoadCell Documentation', u'Nanoshield_LoadCell', 'manual'), ]
303d256bd6615bfef7d26a1b5dadf474dbbb26af
cortex/main.py
cortex/main.py
'''Main file for running experiments. ''' import logging from cortex._lib import (config, data, exp, optimizer, setup_cortex, setup_experiment, train) from cortex._lib.utils import print_section import torch __author__ = 'R Devon Hjelm' __author_email__ = 'erroneus@gmail.com' logger = logging.getLogger('cortex') viz_process = None def run(model=None): '''Main function. ''' # Parse the command-line arguments try: args = setup_cortex(model=model) if args.command == 'setup': # Performs setup only. config.setup() exit(0) else: config.set_config() print_section('EXPERIMENT') model = setup_experiment(args, model=model) print_section('DATA') data.setup(**exp.ARGS['data']) print_section('NETWORKS') if args.reload and not args.load_models: pass else: model.build() if args.load_models: d = torch.load(args.load_models, map_location='cpu') for k in args.reloads: model.nets[k].load_state_dict(d['nets'][k].state_dict()) print_section('OPTIMIZER') optimizer.setup(model, **exp.ARGS['optimizer']) except KeyboardInterrupt: print('Cancelled') exit(0) train.main_loop(model, **exp.ARGS['train'])
'''Main file for running experiments. ''' import logging from cortex._lib import (config, data, exp, optimizer, setup_cortex, setup_experiment, train) from cortex._lib.utils import print_section import torch __author__ = 'R Devon Hjelm' __author_email__ = 'erroneus@gmail.com' logger = logging.getLogger('cortex') viz_process = None def run(model=None): '''Main function. ''' # Parse the command-line arguments try: args = setup_cortex(model=model) if args.command == 'setup': # Performs setup only. config.setup() exit(0) else: config.set_config() print_section('EXPERIMENT') model = setup_experiment(args, model=model) print_section('DATA') data.setup(**exp.ARGS['data']) print_section('NETWORKS') if args.reload and not args.load_models: pass else: model.build() if args.load_models: d = torch.load(args.load_models, map_location='cpu') for k in args.reloads: model.nets[k].load_state_dict(d['nets'][k].state_dict()) print_section('OPTIMIZER') optimizer.setup(model, **exp.ARGS['optimizer']) except KeyboardInterrupt: print('Cancelled') exit(0) train.main_loop(model, **exp.ARGS['train']) viz_process.terminate()
Terminate viz process at end of experiment.
Terminate viz process at end of experiment.
Python
bsd-3-clause
rdevon/cortex,rdevon/cortex
--- +++ @@ -58,3 +58,4 @@ exit(0) train.main_loop(model, **exp.ARGS['train']) + viz_process.terminate()
5b038b468af0f5a060eaea3bd2956ff85ad09071
conman/redirects/views.py
conman/redirects/views.py
from django.views.generic import RedirectView class RouteRedirectView(RedirectView): """Redirect to the target Route.""" permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning def get_redirect_url(self, *args, **kwargs): """ Return the route's target url. Save the route's redirect type for use by RedirectView. """ redirect = kwargs['route'] self.permanent = redirect.permanent return redirect.target.url class URLRedirectView(RedirectView): """Redirect to a URLRedirect Route's target URL.""" permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning def get_redirect_url(self, *args, **kwargs): """ Return the target url. Save the route's redirect type for use by RedirectView. """ redirect = kwargs['route'] self.permanent = redirect.permanent return redirect.target
from django.views.generic import RedirectView class RouteRedirectView(RedirectView): """Redirect to the target Route.""" permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning def get_redirect_url(self, *args, route, **kwargs): """ Return the route's target url. Save the route's redirect type for use by RedirectView. """ self.permanent = route.permanent return route.target.url class URLRedirectView(RedirectView): """Redirect to a URLRedirect Route's target URL.""" permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning def get_redirect_url(self, *args, route, **kwargs): """ Return the target url. Save the route's redirect type for use by RedirectView. """ self.permanent = route.permanent return route.target
Use explicit kwarg over kwargs dictionary access
Use explicit kwarg over kwargs dictionary access
Python
bsd-2-clause
meshy/django-conman,meshy/django-conman,Ian-Foote/django-conman
--- +++ @@ -5,27 +5,25 @@ """Redirect to the target Route.""" permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning - def get_redirect_url(self, *args, **kwargs): + def get_redirect_url(self, *args, route, **kwargs): """ Return the route's target url. Save the route's redirect type for use by RedirectView. """ - redirect = kwargs['route'] - self.permanent = redirect.permanent - return redirect.target.url + self.permanent = route.permanent + return route.target.url class URLRedirectView(RedirectView): """Redirect to a URLRedirect Route's target URL.""" permanent = False # Set to django 1.9's default to avoid RemovedInDjango19Warning - def get_redirect_url(self, *args, **kwargs): + def get_redirect_url(self, *args, route, **kwargs): """ Return the target url. Save the route's redirect type for use by RedirectView. """ - redirect = kwargs['route'] - self.permanent = redirect.permanent - return redirect.target + self.permanent = route.permanent + return route.target
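Declaring route after *args makes it a keyword-only parameter (Python 3 syntax), so Django's URL kwarg arrives directly instead of being pulled out of **kwargs. A minimal sketch of that signature style; RouteStub is a hypothetical stand-in, not a conman model:

class RouteStub:
    permanent = True
    target = 'https://example.com/'

def get_redirect_url(*args, route, **kwargs):
    # 'route' can only be passed by keyword, exactly as Django passes URL kwargs
    return route.target

print(get_redirect_url(route=RouteStub()))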
780b84a2ed7aff91de8ab7b5505e496649d3ddfa
nlppln/wfgenerator.py
nlppln/wfgenerator.py
from scriptcwl import WorkflowGenerator as WFGenerator from .utils import CWL_PATH class WorkflowGenerator(WFGenerator): def __init__(self, working_dir=None): WFGenerator.__init__(self, steps_dir=CWL_PATH, working_dir=working_dir) self.load(step_file='https://raw.githubusercontent.com/nlppln/' 'edlib-align/master/align.cwl') self.load(step_file='https://raw.githubusercontent.com/nlppln/' 'pattern-docker/master/pattern.cwl') def save(self, fname, validate=True, wd=True, inline=False, relative=False, pack=False, encoding='utf-8'): """Save workflow to file For nlppln, the default is to use a working directory (and save steps using the ``wd`` option). """ super(WorkflowGenerator, self).save(fname, validate=validate, wd=wd, inline=inline, relative=relative, pack=pack, encoding=encoding)
from scriptcwl import WorkflowGenerator as WFGenerator from .utils import CWL_PATH class WorkflowGenerator(WFGenerator): def __init__(self, working_dir=None): WFGenerator.__init__(self, steps_dir=CWL_PATH, working_dir=working_dir) self.load(step_file='https://raw.githubusercontent.com/nlppln/' 'edlib-align/master/align.cwl') self.load(step_file='https://raw.githubusercontent.com/nlppln/' 'pattern-docker/master/pattern.cwl') def save(self, fname, validate=True, wd=False, inline=False, relative=True, pack=False, encoding='utf-8'): """Save workflow to file For nlppln, the default is to save workflows with relative paths. """ super(WorkflowGenerator, self).save(fname, validate=validate, wd=wd, inline=inline, relative=relative, pack=pack, encoding=encoding)
Make default saving option relative
Make default saving option relative Saving workflows with wd=True only works when you use a working dir. Since this is optional, it makes more sense to use relative paths (and assume the user uses the nlppln CWL_PATH to save their workflows).
Python
apache-2.0
WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln
--- +++ @@ -12,12 +12,11 @@ self.load(step_file='https://raw.githubusercontent.com/nlppln/' 'pattern-docker/master/pattern.cwl') - def save(self, fname, validate=True, wd=True, inline=False, relative=False, + def save(self, fname, validate=True, wd=False, inline=False, relative=True, pack=False, encoding='utf-8'): """Save workflow to file - For nlppln, the default is to use a working directory (and save steps - using the ``wd`` option). + For nlppln, the default is to save workflows with relative paths. """ super(WorkflowGenerator, self).save(fname, validate=validate,
c6021a20cacea609398bd07adabfba3d7782b7ef
rovercode/drivers/grovepi_ultrasonic_ranger_binary.py
rovercode/drivers/grovepi_ultrasonic_ranger_binary.py
""" Class for communicating with the GrovePi ultrasonic ranger. Here we treat it as a binary sensor. """ import logging logging.basicConfig() LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.getLevelName('INFO')) try: from grovepi import ultrasonicRead except ImportError: LOGGER.warning("GrovePi lib unavailable. Using dummy.") from drivers.dummy_grovepi_interface import ultrasonicRead class GrovePiUltrasonicRangerBinary: """A module to read from the GrovePi Ultrasonic as a binary sensor.""" def __init__(self, port, binary_threshold): """Create a GrovePi Ultrasonic Ranger (Binary) driver module.""" self.port = int(port) self.binary_threshold = binary_threshold print(f"Setting up GrovePi Ultrasonic Ranger (Binary) on port {port}") def is_high(self): """HIGH, meaning "not seeing something".""" # to match the old GPIO sensors, we'll make this sensor active low # False output means object detected # True output means no object detected return ultrasonicRead(self.port) > self.binary_threshold
""" Class for communicating with the GrovePi ultrasonic ranger. Here we treat it as a binary sensor. """ import logging logging.basicConfig() LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.getLevelName('INFO')) try: from grovepi import ultrasonicRead except ImportError: LOGGER.warning("GrovePi lib unavailable. Using dummy.") from drivers.dummy_grovepi_interface import ultrasonicRead class GrovePiUltrasonicRangerBinary: """A module to read from the GrovePi Ultrasonic as a binary sensor.""" def __init__(self, port, binary_threshold): """Create a GrovePi Ultrasonic Ranger (Binary) driver module.""" self.port = int(port) self.binary_threshold = binary_threshold print(f"Setting up GrovePi Ultrasonic Ranger (Binary) on port {port}") def is_high(self): """HIGH, meaning "seeing something".""" # False output means no object detected # True output means object detected return ultrasonicRead(self.port) < self.binary_threshold
Fix sensor to be active high
Fix sensor to be active high
Python
apache-2.0
aninternetof/rover-code,aninternetof/rover-code,aninternetof/rover-code
--- +++ @@ -25,8 +25,7 @@ print(f"Setting up GrovePi Ultrasonic Ranger (Binary) on port {port}") def is_high(self): - """HIGH, meaning "not seeing something".""" - # to match the old GPIO sensors, we'll make this sensor active low - # False output means object detected - # True output means no object detected - return ultrasonicRead(self.port) > self.binary_threshold + """HIGH, meaning "seeing something".""" + # False output means no object detected + # True output means object detected + return ultrasonicRead(self.port) < self.binary_threshold
6e19ff22ea0e8c78e7faaa2ba58626de383dfee3
djangae/contrib/mappers/urls.py
djangae/contrib/mappers/urls.py
from django.conf.urls import url from djangae.utils import djangae_webapp from django.views.decorators.csrf import csrf_exempt try: from mapreduce.main import create_handlers_map wrapped_urls = [url(url_re.replace('.*/', '^', 1), csrf_exempt(djangae_webapp(func))) for url_re, func in create_handlers_map()] except ImportError as e: wrapped_urls = [] urlpatterns = wrapped_urls
from django.conf.urls import url from djangae.utils import djangae_webapp from django.views.decorators.csrf import csrf_exempt # The Mapreduce status UI uses inline JS, which will fail If we have django-csp # installed and are not allowing 'unsafe-inline' as a SCRIPT_SRC. try: from csp.decorators import csp_update exempt_from_unsafe_inline = csp_update(SCRIPT_SRC=("'unsafe-inline'",)) except ImportError: exempt_from_unsafe_inline = lambda func: func try: from mapreduce.main import create_handlers_map wrapped_urls = [ url( url_re.replace('.*/', '^', 1), exempt_from_unsafe_inline(csrf_exempt(djangae_webapp(func))) ) for url_re, func in create_handlers_map() ] except ImportError as e: wrapped_urls = [] urlpatterns = wrapped_urls
Allow the Mapreduce status UI to function when a CSP is enforced with django-csp.
Allow the Mapreduce status UI to function when a CSP is enforced with django-csp.
Python
bsd-3-clause
potatolondon/djangae,grzes/djangae,grzes/djangae,kirberich/djangae,potatolondon/djangae,grzes/djangae,kirberich/djangae,kirberich/djangae
--- +++ @@ -3,9 +3,24 @@ from django.views.decorators.csrf import csrf_exempt +# The Mapreduce status UI uses inline JS, which will fail If we have django-csp +# installed and are not allowing 'unsafe-inline' as a SCRIPT_SRC. +try: + from csp.decorators import csp_update + exempt_from_unsafe_inline = csp_update(SCRIPT_SRC=("'unsafe-inline'",)) +except ImportError: + exempt_from_unsafe_inline = lambda func: func + + try: from mapreduce.main import create_handlers_map - wrapped_urls = [url(url_re.replace('.*/', '^', 1), csrf_exempt(djangae_webapp(func))) for url_re, func in create_handlers_map()] + wrapped_urls = [ + url( + url_re.replace('.*/', '^', 1), + exempt_from_unsafe_inline(csrf_exempt(djangae_webapp(func))) + ) + for url_re, func in create_handlers_map() + ] except ImportError as e: wrapped_urls = []
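The guarded import means the views are only wrapped with a real CSP exemption when django-csp is installed, and with an identity decorator otherwise. The shape of that fallback on its own, without the Mapreduce handler map:

try:
    from csp.decorators import csp_update
    exempt_from_unsafe_inline = csp_update(SCRIPT_SRC=("'unsafe-inline'",))
    print('django-csp installed: status views get an unsafe-inline exemption')
except ImportError:
    exempt_from_unsafe_inline = lambda func: func      # no django-csp: decorator is a no-op
    print('django-csp not installed: decorator left as an identity function')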
92a5712bdb04ae265120a41d688b37b60507d9dd
opps/core/__init__.py
opps/core/__init__.py
# -*- coding: utf-8 -*- from django.utils.translation import ugettext_lazy as _ from django.conf import settings trans_app_label = _('Opps') settings.INSTALLED_APPS += ( 'opps.article', 'opps.image', 'opps.channel', 'opps.source', 'django.contrib.redirects', 'django_thumbor', 'googl', 'redactor', 'tagging',) settings.MIDDLEWARE_CLASSES += ( 'django.contrib.redirects.middleware.RedirectFallbackMiddleware',) # Opps getattr(settings, 'OPPS_SHORT', 'googl') getattr(settings, 'OPPS_SHORT_URL', 'googl.short.GooglUrlShort') # redactor getattr(settings, 'REDACTOR_OPTIONS', {'lang': 'en'}) getattr(settings, 'REDACTOR_UPLOAD', 'uploads/') # thumbor getattr(settings, 'THUMBOR_SERVER', 'http://localhost:8888') getattr(settings, 'THUMBOR_MEDIA_URL', 'http://localhost:8000/media') getattr(settings, 'THUMBOR_SECURITY_KEY', '')
# -*- coding: utf-8 -*- from django.utils.translation import ugettext_lazy as _ from django.conf import settings trans_app_label = _('Opps') settings.INSTALLED_APPS += ( 'opps.article', 'opps.image', 'opps.channel', 'opps.source', 'django.contrib.redirects', 'django_thumbor', 'googl', 'redactor', 'static_sitemaps', 'tagging',) settings.MIDDLEWARE_CLASSES += ( 'django.contrib.redirects.middleware.RedirectFallbackMiddleware',) # Opps getattr(settings, 'OPPS_SHORT', 'googl') getattr(settings, 'OPPS_SHORT_URL', 'googl.short.GooglUrlShort') # Sitemap if not hasattr(settings, 'STATICSITEMAPS_ROOT_SITEMAP'): settings.STATICSITEMAPS_ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps' # redactor getattr(settings, 'REDACTOR_OPTIONS', {'lang': 'en'}) getattr(settings, 'REDACTOR_UPLOAD', 'uploads/') # thumbor getattr(settings, 'THUMBOR_SERVER', 'http://localhost:8888') getattr(settings, 'THUMBOR_MEDIA_URL', 'http://localhost:8000/media') getattr(settings, 'THUMBOR_SECURITY_KEY', '')
Set the sitemap processing class
Set the sitemap processing class
Python
mit
YACOWS/opps,jeanmask/opps,jeanmask/opps,opps/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,williamroot/opps,williamroot/opps,YACOWS/opps,williamroot/opps,opps/opps,opps/opps,YACOWS/opps,opps/opps,YACOWS/opps
--- +++ @@ -14,6 +14,7 @@ 'django_thumbor', 'googl', 'redactor', + 'static_sitemaps', 'tagging',) settings.MIDDLEWARE_CLASSES += ( @@ -23,6 +24,10 @@ getattr(settings, 'OPPS_SHORT', 'googl') getattr(settings, 'OPPS_SHORT_URL', 'googl.short.GooglUrlShort') +# Sitemap +if not hasattr(settings, 'STATICSITEMAPS_ROOT_SITEMAP'): + settings.STATICSITEMAPS_ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps' + # redactor getattr(settings, 'REDACTOR_OPTIONS', {'lang': 'en'}) getattr(settings, 'REDACTOR_UPLOAD', 'uploads/')
f46226ed4b5a1c0bf2592692aba8481cc777414f
exp/views/dashboard.py
exp/views/dashboard.py
from django.shortcuts import redirect from django.urls import reverse_lazy from django.views import generic from exp.views.mixins import ExperimenterLoginRequiredMixin class ExperimenterDashboardView(ExperimenterLoginRequiredMixin, generic.TemplateView): ''' ExperimenterDashboard will show a customized view to each user based on the role and tasks that they perform. ''' template_name = 'exp/dashboard.html' def dispatch(self, request, *args, **kwargs): if not request.user.is_researcher: return redirect(reverse_lazy('web:home')) if self.request.path.endswith('/'): if self.request.user.groups.exists(): # Redirect to manage studies if user has been approved return redirect(reverse_lazy('exp:study-list')) return super().dispatch(request, *args, **kwargs) # If no trailing slash, append slash and redirect. return redirect(self.request.path + '/')
from django.shortcuts import redirect from django.urls import reverse_lazy from django.views import generic from exp.views.mixins import ExperimenterLoginRequiredMixin class ExperimenterDashboardView(ExperimenterLoginRequiredMixin, generic.TemplateView): ''' ExperimenterDashboard will show a customized view to each user based on the role and tasks that they perform. ''' template_name = 'exp/dashboard.html' def dispatch(self, request, *args, **kwargs): if hasattr(request.user, 'is_researcher') and not request.user.is_researcher: return redirect(reverse_lazy('web:home')) if self.request.path.endswith('/'): if self.request.user.groups.exists(): # Redirect to manage studies if user has been approved return redirect(reverse_lazy('exp:study-list')) return super().dispatch(request, *args, **kwargs) # If no trailing slash, append slash and redirect. return redirect(self.request.path + '/')
Check if user has is_researcher attribute before accessing it to accommodate AnonymousUser.
Check if user has is_researcher attribute before accessing it to accommodate AnonymousUser.
Python
apache-2.0
CenterForOpenScience/lookit-api,CenterForOpenScience/lookit-api,pattisdr/lookit-api,CenterForOpenScience/lookit-api,pattisdr/lookit-api,pattisdr/lookit-api
--- +++ @@ -14,7 +14,7 @@ template_name = 'exp/dashboard.html' def dispatch(self, request, *args, **kwargs): - if not request.user.is_researcher: + if hasattr(request.user, 'is_researcher') and not request.user.is_researcher: return redirect(reverse_lazy('web:home')) if self.request.path.endswith('/'): if self.request.user.groups.exists():
2b3e165a5dbf34d8ed94eda1453b20099d88618f
BasicSequences/__init__.py
BasicSequences/__init__.py
from RabiAmp import RabiAmp from Ramsey import Ramsey from FlipFlop import FlipFlop from SPAM import SPAM from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
from RabiAmp import RabiAmp from Ramsey import Ramsey from FlipFlop import FlipFlop from SPAM import SPAM from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT from itertools import product import operator from ..PulsePrimitives import Id, X def create_cal_seqs(qubits, numCals): """ Helper function to create a set of calibration sequences. """ calSet = [Id, X] calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))] return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
Add a helper function to create calibration sequences.
Add a helper function to create calibration sequences.
Python
apache-2.0
BBN-Q/QGL,BBN-Q/QGL
--- +++ @@ -3,3 +3,18 @@ from FlipFlop import FlipFlop from SPAM import SPAM from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT + + + + +from itertools import product +import operator +from ..PulsePrimitives import Id, X + +def create_cal_seqs(qubits, numCals): + """ + Helper function to create a set of calibration sequences. + """ + calSet = [Id, X] + calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))] + return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
091f9daf8758e56c82dbe7a88a50489ab279f793
adhocracy/lib/helpers/site_helper.py
adhocracy/lib/helpers/site_helper.py
from pylons import config, g from pylons.i18n import _ def name(): return config.get('adhocracy.site.name', _("Adhocracy")) def base_url(instance, path=None): url = "%s://" % config.get('adhocracy.protocol', 'http').strip() if instance is not None and g.single_instance is None: url += instance.key + "." url += config.get('adhocracy.domain').strip() if path is not None: url += path return url def shortlink_url(delegateable): path = "/d/%s" % delegateable.id return base_url(None, path=path)
from pylons import config, g from pylons.i18n import _ def domain(): return config.get('adhocracy.domain').split(':')[0] def name(): return config.get('adhocracy.site.name', _("Adhocracy")) def base_url(instance, path=None): url = "%s://" % config.get('adhocracy.protocol', 'http').strip() if instance is not None and g.single_instance is None: url += instance.key + "." url += config.get('adhocracy.domain').strip() if path is not None: url += path return url def shortlink_url(delegateable): path = "/d/%s" % delegateable.id return base_url(None, path=path)
Add h.site.domain() to return the domian without the port
Add h.site.domain() to return the domain without the port
Python
agpl-3.0
DanielNeugebauer/adhocracy,liqd/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,SysTheron/adhocracy,alkadis/vcv,phihag/adhocracy,alkadis/vcv,phihag/adhocracy,SysTheron/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,liqd/adhocracy,liqd/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,SysTheron/adhocracy,phihag/adhocracy,phihag/adhocracy,phihag/adhocracy
--- +++ @@ -1,5 +1,9 @@ from pylons import config, g from pylons.i18n import _ + + +def domain(): + return config.get('adhocracy.domain').split(':')[0] def name():
ede07cc5f2e410481b71bd7ba0cf1aa2fce26e08
astroquery/simbad/tests/test_simbad.py
astroquery/simbad/tests/test_simbad.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst from ... import simbad def test_simbad(): r = simbad.QueryAroundId('m31', radius='0.5s').execute() print r.table assert "M 31" in r.table["MAIN_ID"] def test_multi(): result = simbad.QueryMulti( [simbad.QueryId('m31'), simbad.QueryId('m51')]) table = result.execute().table assert "M 31" in table["MAIN_ID"] assert "M 51" in table["MAIN_ID"] if __name__ == "__main__": test_simbad() test_multi()
# Licensed under a 3-clause BSD style license - see LICENSE.rst from ... import simbad import sys is_python3 = (sys.version_info >= (3,)) def test_simbad(): r = simbad.QueryAroundId('m31', radius='0.5s').execute() print r.table if is_python3: m31 = b"M 31" else: m31 = "M 31" assert m31 in r.table["MAIN_ID"] def test_multi(): result = simbad.QueryMulti( [simbad.QueryId('m31'), simbad.QueryId('m51')]) table = result.execute().table if is_python3: m31 = b"M 31" m51 = b"M 51" else: m31 = "M 31" m51 = "M 51" assert m31 in table["MAIN_ID"] assert m51 in table["MAIN_ID"] if __name__ == "__main__": test_simbad() test_multi()
Fix python3 tests for simbad.
Fix python3 tests for simbad.
Python
bsd-3-clause
imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery,imbasimba/astroquery
--- +++ @@ -1,11 +1,17 @@ # Licensed under a 3-clause BSD style license - see LICENSE.rst from ... import simbad +import sys +is_python3 = (sys.version_info >= (3,)) def test_simbad(): r = simbad.QueryAroundId('m31', radius='0.5s').execute() print r.table - assert "M 31" in r.table["MAIN_ID"] + if is_python3: + m31 = b"M 31" + else: + m31 = "M 31" + assert m31 in r.table["MAIN_ID"] def test_multi(): @@ -13,8 +19,14 @@ [simbad.QueryId('m31'), simbad.QueryId('m51')]) table = result.execute().table - assert "M 31" in table["MAIN_ID"] - assert "M 51" in table["MAIN_ID"] + if is_python3: + m31 = b"M 31" + m51 = b"M 51" + else: + m31 = "M 31" + m51 = "M 51" + assert m31 in table["MAIN_ID"] + assert m51 in table["MAIN_ID"] if __name__ == "__main__": test_simbad()
034068e9d01f8087bf9061047fb3aa2550c31708
cadnano/gui/views/sliceview/slicestyles.py
cadnano/gui/views/sliceview/slicestyles.py
from PyQt5.QtGui import QFont from cadnano.gui.views.styles import BLUE_STROKE, GRAY_STROKE, THE_FONT # Slice Sizing SLICE_HELIX_RADIUS = 15. SLICE_HELIX_STROKE_WIDTH = 0.5 SLICE_HELIX_MOD_HILIGHT_WIDTH = 1 EMPTY_HELIX_STROKE_WIDTH = 0.25 # Z values # bottom ZSLICEHELIX = 40 ZSELECTION = 50 ZDESELECTOR = 60 ZWEDGEGIZMO = 100 ZPXIGROUP = 150 ZPARTITEM = 200 # top # Part appearance SLICE_FILL = "#ffffff" DEFAULT_PEN_WIDTH = 0 # cosmetic DEFAULT_ALPHA = 2 SELECTED_COLOR = '#5a8bff' SELECTED_PEN_WIDTH = 2 SELECTED_ALPHA = 0 SLICE_NUM_FONT = QFont(THE_FONT, 10, QFont.Bold) USE_TEXT_COLOR = "#ffffff" SLICE_TEXT_COLOR = "#000000" ACTIVE_STROKE = '#cccc00' ACTIVE_GRID_DOT_COLOR = '#0000ff' DEFAULT_GRID_DOT_COLOR = '#0000ff' VHI_HINT_ACTIVE_STROKE = BLUE_STROKE VHI_HINT_INACTIVE_STROKE = '#cccccc'
from PyQt5.QtGui import QFont from cadnano.gui.views.styles import BLUE_STROKE, GRAY_STROKE, THE_FONT # Slice Sizing SLICE_HELIX_RADIUS = 15. SLICE_HELIX_STROKE_WIDTH = 0.5 SLICE_HELIX_MOD_HILIGHT_WIDTH = 1 EMPTY_HELIX_STROKE_WIDTH = 0.25 # Z values # bottom ZSLICEHELIX = 40 ZSELECTION = 50 ZDESELECTOR = 60 ZWEDGEGIZMO = 100 ZPXIGROUP = 150 ZPARTITEM = 200 # top # Part appearance SLICE_FILL = "#f6f6f6" DEFAULT_PEN_WIDTH = 0 # cosmetic DEFAULT_ALPHA = 2 SELECTED_COLOR = '#5a8bff' SELECTED_PEN_WIDTH = 2 SELECTED_ALPHA = 0 SLICE_NUM_FONT = QFont(THE_FONT, 10, QFont.Bold) USE_TEXT_COLOR = "#ffffff" SLICE_TEXT_COLOR = "#000000" ACTIVE_STROKE = '#cccc00' ACTIVE_GRID_DOT_COLOR = '#0000ff' DEFAULT_GRID_DOT_COLOR = '#0000ff' VHI_HINT_ACTIVE_STROKE = BLUE_STROKE VHI_HINT_INACTIVE_STROKE = '#cccccc'
Fix reset GridItem color for SPA preview
2.5.1: Fix reset GridItem color for SPA preview
Python
mit
scholer/cadnano2.5
--- +++ @@ -18,7 +18,7 @@ # top # Part appearance -SLICE_FILL = "#ffffff" +SLICE_FILL = "#f6f6f6" DEFAULT_PEN_WIDTH = 0 # cosmetic DEFAULT_ALPHA = 2
095678fa910f78de1cac80bef46d0e29323a120c
camz/camera.py
camz/camera.py
import io import picamera class Camera(object): def __init__(self): self.camera = picamera.PiCamera() self.camera.resolution = (800, 600) self.camera.framerate = 30 self.camera.rotation = 180 self.camera.led = False self.recording = False self.loopStream = picamera.PiCameraCircularIO(self.camera, seconds=600) def captureStream(self): if self.recording: self.camera.wait_recording() stream = io.BytesIO() self.camera.capture(stream, format='jpeg', use_video_port=True, resize=(320, 240)) stream.seek(0) return stream def startRecording(self): self.camera.start_recording(self.loopStream, format='h264') self.recording = True def stopRecording(self): self.camera.stop_recording() self.recording = False def writeStream(self, filename): if not self.recording: return with io.open(filename, 'wb') as output: for frame in self.loopStream.frames: if frame.header: self.loopStream.seek(frame.position) break while True: buf = stream.read1() if not buf: break output.write(buf) self.loopStream.seek(0) self.loopStream.truncate()
import io import picamera class Camera(object): def __init__(self): self.camera = picamera.PiCamera() self.camera.resolution = (640, 480) self.camera.rotation = 180 self.camera.led = False self.recording = False self.loopStream = picamera.PiCameraCircularIO(self.camera, seconds=600) def captureStream(self): stream = io.BytesIO() self.camera.capture(stream, format='jpeg', use_video_port=True, resize=(320, 240)) stream.seek(0) return stream def startRecording(self): self.camera.start_recording(self.loopStream, format='h264') self.recording = True def stopRecording(self): self.camera.stop_recording() self.recording = False def writeStream(self, filename): if not self.recording: return with io.open(filename, 'wb') as output: for frame in self.loopStream.frames: if frame.header: self.loopStream.seek(frame.position) break while True: buf = self.loopStream.read1() if not buf: break output.write(buf) self.loopStream.seek(0) self.loopStream.truncate()
Remove recording wait, works better
Remove recording wait, works better
Python
mit
calston/pirnv
--- +++ @@ -4,8 +4,7 @@ class Camera(object): def __init__(self): self.camera = picamera.PiCamera() - self.camera.resolution = (800, 600) - self.camera.framerate = 30 + self.camera.resolution = (640, 480) self.camera.rotation = 180 self.camera.led = False @@ -14,9 +13,6 @@ self.loopStream = picamera.PiCameraCircularIO(self.camera, seconds=600) def captureStream(self): - if self.recording: - self.camera.wait_recording() - stream = io.BytesIO() self.camera.capture(stream, format='jpeg', use_video_port=True, resize=(320, 240)) stream.seek(0) @@ -41,7 +37,7 @@ self.loopStream.seek(frame.position) break while True: - buf = stream.read1() + buf = self.loopStream.read1() if not buf: break output.write(buf)
4fa76c04a3455ebce6251b59aea54f5a769f3deb
invite/utils.py
invite/utils.py
from datetime import date, timedelta def get_cutoff_date(days): """Calculate the cutoff date or return None if no time period was set.""" if days is None or type(days) != int: return None else: if days >= 0: return date.today() - timedelta(days=days) else: return None
from datetime import date, timedelta def get_cutoff_date(days): """Calculate the cutoff date or return None if no time period was set.""" if days is None or type(days) != int: return None else: if days > 0: return date.today() - timedelta(days=days) elif days == 0: return date.today() + timedelta(days=2) else: return None
Make it so a cutoff of 0 leads to no invites/registrations being shown.
Make it so a cutoff of 0 leads to no invites/registrations being shown.
Python
bsd-3-clause
unt-libraries/django-invite,unt-libraries/django-invite
--- +++ @@ -6,7 +6,9 @@ if days is None or type(days) != int: return None else: - if days >= 0: + if days > 0: return date.today() - timedelta(days=days) + elif days == 0: + return date.today() + timedelta(days=2) else: return None
7206db27eb5fccde808f7a4e2b9bea974181bdbc
fluenttest/__init__.py
fluenttest/__init__.py
from fluenttest.class_based import ClassTester, lookup_class, the_class from fluenttest.test_case import TestCase __all__ = [ 'ClassTester', 'TestCase', 'lookup_class', 'the_class', ]
from fluenttest.class_based import ClassTester, lookup_class, the_class from fluenttest.test_case import TestCase version_info = (1, 1, 0) __version__ = '.'.join(str(x) for x in version_info) __all__ = [ 'ClassTester', 'TestCase', 'lookup_class', 'the_class', '__version__', 'version_info', ]
Add __version__ attribute to fluenttest.
Add __version__ attribute to fluenttest. The __version__ attribute is the public version identifier and is server safe. The version_info tuple contains the full version.
Python
bsd-2-clause
dave-shawley/fluent-test
--- +++ @@ -1,9 +1,13 @@ from fluenttest.class_based import ClassTester, lookup_class, the_class from fluenttest.test_case import TestCase +version_info = (1, 1, 0) +__version__ = '.'.join(str(x) for x in version_info) __all__ = [ 'ClassTester', 'TestCase', 'lookup_class', 'the_class', + '__version__', + 'version_info', ]
ae4f144ea9256b4b53fe497a656be38f32213277
TwitterDataIngestSource.py
TwitterDataIngestSource.py
import sys from itertools import ifilter from requests_oauthlib import OAuth1Session class TwitterDataIngestSource: """Ingest data from Twitter""" def __init__(self, config): self.config = config def __iter__(self): if 'track' in self.config: self.track = self.config['track'] else: self.track = 'ski,surf,board' auth = OAuth1Session( self.config['consumer_key'], client_secret = self.config['consumer_secret'], resource_owner_key = self.config['access_token'], resource_owner_secret = self.config['access_token_secret'] ) request = auth.post( 'https://stream.twitter.com/1.1/statuses/filter.json', data = 'track=' + self.track, stream = True ) # filter out empty lines sent to keep the stream alive self.source_iterator = ifilter(lambda x: x, request.iter_lines()) return self def next(self): return self.source_iterator.next()
import sys from itertools import ifilter from requests_oauthlib import OAuth1Session import json class TwitterDataIngestSource: """Ingest data from Twitter""" def __init__(self, config): self.config = config def __iter__(self): if 'track' in self.config: self.track = self.config['track'] else: self.track = 'ski,surf,board' auth = OAuth1Session( self.config['consumer_key'], client_secret = self.config['consumer_secret'], resource_owner_key = self.config['access_token'], resource_owner_secret = self.config['access_token_secret'] ) request = auth.post( 'https://stream.twitter.com/1.1/statuses/filter.json', data = 'track=' + self.track, stream = True ) # filter out empty lines sent to keep the stream alive self.source_iterator = ifilter(lambda x: x, request.iter_lines()) return self def next(self): return { 'tweet' : json.loads(self.source_iterator.next()) }
Fix tweet formatting issues when streaming from twitter
Fix tweet formatting issues when streaming from twitter
Python
mit
W205-Social-Media/w205-data-ingest,abessou/w251-FinalProject,abessou/w251-FinalProject
--- +++ @@ -1,6 +1,7 @@ import sys from itertools import ifilter from requests_oauthlib import OAuth1Session +import json class TwitterDataIngestSource: """Ingest data from Twitter""" @@ -33,4 +34,4 @@ return self def next(self): - return self.source_iterator.next() + return { 'tweet' : json.loads(self.source_iterator.next()) }
f50efcb65d794985185f5cc82c697673f50e4c47
synapse/replication/slave/storage/keys.py
synapse/replication/slave/storage/keys.py
# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from synapse.storage import DataStore from synapse.storage.keys import KeyStore from ._base import BaseSlavedStore, __func__ class SlavedKeyStore(BaseSlavedStore): _get_server_verify_key = KeyStore.__dict__[ "_get_server_verify_key" ] get_server_verify_keys = __func__(DataStore.get_server_verify_keys) store_server_verify_key = __func__(DataStore.store_server_verify_key) get_server_keys_json = __func__(DataStore.get_server_keys_json) store_server_keys_json = __func__(DataStore.store_server_keys_json)
# -*- coding: utf-8 -*- # Copyright 2015, 2016 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from synapse.storage import KeyStore # KeyStore isn't really safe to use from a worker, but for now we do so and hope that # the races it creates aren't too bad. SlavedKeyStore = KeyStore
Replace SlavedKeyStore with a shim
Replace SlavedKeyStore with a shim. Since we're pulling everything out of KeyStore anyway, we may as well simplify it.
Python
apache-2.0
matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse
--- +++ @@ -13,19 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from synapse.storage import DataStore -from synapse.storage.keys import KeyStore +from synapse.storage import KeyStore -from ._base import BaseSlavedStore, __func__ +# KeyStore isn't really safe to use from a worker, but for now we do so and hope that +# the races it creates aren't too bad. - -class SlavedKeyStore(BaseSlavedStore): - _get_server_verify_key = KeyStore.__dict__[ - "_get_server_verify_key" - ] - - get_server_verify_keys = __func__(DataStore.get_server_verify_keys) - store_server_verify_key = __func__(DataStore.store_server_verify_key) - - get_server_keys_json = __func__(DataStore.get_server_keys_json) - store_server_keys_json = __func__(DataStore.store_server_keys_json) +SlavedKeyStore = KeyStore
62c9322cf1508eafa6bd3061d1f047ce42b95804
byceps/blueprints/ticketing/views.py
byceps/blueprints/ticketing/views.py
""" byceps.blueprints.ticketing.views ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from flask import abort, g from ...services.party import service as party_service from ...services.ticketing import ticket_service from ...util.framework.blueprint import create_blueprint from ...util.iterables import find from ...util.framework.templating import templated blueprint = create_blueprint('ticketing', __name__) @blueprint.route('/mine') @templated def index_mine(): """List tickets related to the current user.""" current_user = _get_current_user_or_403() party = party_service.find_party(g.party_id) tickets = ticket_service.find_tickets_related_to_user_for_party( current_user.id, party.id) current_user_uses_any_ticket = find( lambda t: t.used_by_id == current_user.id, tickets) return { 'party_title': party.title, 'tickets': tickets, 'current_user_uses_any_ticket': current_user_uses_any_ticket, } def _get_current_user_or_403(): user = g.current_user if not user.is_active: abort(403) return user
""" byceps.blueprints.ticketing.views ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2017 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from flask import abort, g from ...services.party import service as party_service from ...services.ticketing import ticket_service from ...util.framework.blueprint import create_blueprint from ...util.iterables import find from ...util.framework.templating import templated blueprint = create_blueprint('ticketing', __name__) @blueprint.route('/mine') @templated def index_mine(): """List tickets related to the current user.""" current_user = _get_current_user_or_403() party = party_service.find_party(g.party_id) tickets = ticket_service.find_tickets_related_to_user_for_party( current_user.id, party.id) tickets = [ticket for ticket in tickets if not ticket.revoked] current_user_uses_any_ticket = find( lambda t: t.used_by_id == current_user.id, tickets) return { 'party_title': party.title, 'tickets': tickets, 'current_user_uses_any_ticket': current_user_uses_any_ticket, } def _get_current_user_or_403(): user = g.current_user if not user.is_active: abort(403) return user
Hide revoked tickets from user in personal ticket list
Hide revoked tickets from user in personal ticket list
Python
bsd-3-clause
m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
--- +++ @@ -29,6 +29,8 @@ tickets = ticket_service.find_tickets_related_to_user_for_party( current_user.id, party.id) + tickets = [ticket for ticket in tickets if not ticket.revoked] + current_user_uses_any_ticket = find( lambda t: t.used_by_id == current_user.id, tickets)
2399fea4f3fa1c4d4df1bac06b6fb6e3e32f03e9
exotica_python/tests/runtest.py
exotica_python/tests/runtest.py
#!/usr/bin/env python # This is a workaround for liburdf.so throwing an exception and killing # the process on exit in ROS Indigo. import subprocess import os import sys tests = ['core.py', 'valkyrie_com.py' #, # 'valkyrie_collision_check_fcl_default.py', # 'valkyrie_collision_check_fcl_latest.py' ] for test in tests: process=subprocess.Popen(['rosrun', 'exotica_python', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE) output = process.stdout.readlines() print(''.join(output)) if output[-1][0:11]!='>>SUCCESS<<': print('Test '+test+' failed\n'+process.stderr.read()) os._exit(1)
#!/usr/bin/env python # This is a workaround for liburdf.so throwing an exception and killing # the process on exit in ROS Indigo. import subprocess import os import sys tests = ['core.py', 'valkyrie_com.py', 'valkyrie_collision_check_fcl_default.py', 'valkyrie_collision_check_fcl_latest.py' ] for test in tests: process=subprocess.Popen(['rosrun', 'exotica_python', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE) output = process.stdout.readlines() print(''.join(output)) if output[-1][0:11]!='>>SUCCESS<<': print('Test '+test+' failed\n'+process.stderr.read()) os._exit(1)
Enable Valkyrie collision checking tests
exotica_python: Enable Valkyrie collision checking tests
Python
bsd-3-clause
openhumanoids/exotica,openhumanoids/exotica,openhumanoids/exotica,openhumanoids/exotica
--- +++ @@ -7,9 +7,9 @@ import sys tests = ['core.py', - 'valkyrie_com.py' #, - # 'valkyrie_collision_check_fcl_default.py', - # 'valkyrie_collision_check_fcl_latest.py' + 'valkyrie_com.py', + 'valkyrie_collision_check_fcl_default.py', + 'valkyrie_collision_check_fcl_latest.py' ] for test in tests:
3de1b3c8538a473c29189ef4df02f93e67e221ac
migrations/versions/420_dos_is_coming.py
migrations/versions/420_dos_is_coming.py
"""DOS is coming Revision ID: 420 Revises: 410_remove_empty_drafts Create Date: 2015-11-16 14:10:35.814066 """ # revision identifiers, used by Alembic. revision = '420' down_revision = '410_remove_empty_drafts' from alembic import op import sqlalchemy as sa from app.models import Framework def upgrade(): op.execute("COMMIT") op.execute("ALTER TYPE framework_enum ADD VALUE IF NOT EXISTS 'dos' after 'gcloud'") framework = Framework.query.filter(Framework.slug == 'digital-outcomes-and-specialists').first() if not framework: op.execute(""" INSERT INTO frameworks (name, framework, status, slug) values('Digital Outcomes and Specialists', 'dos', 'coming', 'digital-outcomes-and-specialists') """) def downgrade(): op.execute(""" DELETE FROM frameworks where slug='digital-outcomes-and-specialists' """)
"""DOS is coming Revision ID: 420 Revises: 410_remove_empty_drafts Create Date: 2015-11-16 14:10:35.814066 """ # revision identifiers, used by Alembic. revision = '420' down_revision = '410_remove_empty_drafts' from alembic import op def upgrade(): op.execute("COMMIT") op.execute("ALTER TYPE framework_enum ADD VALUE IF NOT EXISTS 'dos' after 'gcloud'") conn = op.get_bind() res = conn.execute("SELECT * FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'") results = res.fetchall() if not results: op.execute(""" INSERT INTO frameworks (name, framework, status, slug) values('Digital Outcomes and Specialists', 'dos', 'coming', 'digital-outcomes-and-specialists') """) def downgrade(): op.execute(""" DELETE FROM frameworks where slug='digital-outcomes-and-specialists' """)
Use `op` instead of `app` so that `list_migrations` still works
Use `op` instead of `app` so that `list_migrations` still works. By importing `app`, the `list_migrations.py` script broke because it doesn't have the `app` context.
Python
mit
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
--- +++ @@ -11,17 +11,17 @@ down_revision = '410_remove_empty_drafts' from alembic import op -import sqlalchemy as sa -from app.models import Framework def upgrade(): op.execute("COMMIT") op.execute("ALTER TYPE framework_enum ADD VALUE IF NOT EXISTS 'dos' after 'gcloud'") - framework = Framework.query.filter(Framework.slug == 'digital-outcomes-and-specialists').first() + conn = op.get_bind() + res = conn.execute("SELECT * FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'") + results = res.fetchall() - if not framework: + if not results: op.execute(""" INSERT INTO frameworks (name, framework, status, slug) values('Digital Outcomes and Specialists', 'dos', 'coming', 'digital-outcomes-and-specialists')
bb18029c9ca75b420aa486e393b2f79e8f2e009b
examples/echobot.py
examples/echobot.py
# -*- coding: utf-8 -*- from linepy import * client = LineClient() #client = LineClient(authToken='AUTHTOKEN') client.log("Auth Token : " + str(client.authToken)) poll = LinePoll(client) # Receive messages from LinePoll def RECEIVE_MESSAGE(op): msg = op.message text = msg.text msg_id = msg.id receiver = msg.to sender = msg._from if msg.contentType == 0: contact = client.getContact(receiver) txt = '[%s] %s' % (contact.displayName, text) client.sendMessage(receiver, txt) client.log(txt) # Add function to LinePoll poll.addOpInterruptWithDict({ OpType.RECEIVE_MESSAGE: RECEIVE_MESSAGE }) while True: poll.trace()
# -*- coding: utf-8 -*- from linepy import * client = LineClient() #client = LineClient(authToken='AUTHTOKEN') client.log("Auth Token : " + str(client.authToken)) poll = LinePoll(client) # Receive messages from LinePoll def RECEIVE_MESSAGE(op): msg = op.message text = msg.text msg_id = msg.id receiver = msg.to sender = msg._from # Check content only text message if msg.contentType == 0: # Check only group chat if msg.toType == 2: # Get sender contact contact = client.getContact(sender) txt = '[%s] %s' % (contact.displayName, text) # Send a message client.sendMessage(receiver, txt) # Print log client.log(txt) # Add function to LinePoll poll.addOpInterruptWithDict({ OpType.RECEIVE_MESSAGE: RECEIVE_MESSAGE }) while True: poll.trace()
Change receiver contact to sender
Change receiver contact to sender
Python
bsd-3-clause
fadhiilrachman/line-py
--- +++ @@ -16,12 +16,18 @@ msg_id = msg.id receiver = msg.to sender = msg._from - + + # Check content only text message if msg.contentType == 0: - contact = client.getContact(receiver) - txt = '[%s] %s' % (contact.displayName, text) - client.sendMessage(receiver, txt) - client.log(txt) + # Check only group chat + if msg.toType == 2: + # Get sender contact + contact = client.getContact(sender) + txt = '[%s] %s' % (contact.displayName, text) + # Send a message + client.sendMessage(receiver, txt) + # Print log + client.log(txt) # Add function to LinePoll poll.addOpInterruptWithDict({
0313743781fd046a587936545f030938beb71364
dataset/dataset/spiders/dataset_spider.py
dataset/dataset/spiders/dataset_spider.py
from scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import Selector from .. import items class DatasetSpider(CrawlSpider): pages = 9466 name = 'dataset' allowed_domains = ['data.gc.ca'] start_urls = [] for i in range(1, pages + 1): start_urls.append('http://data.gc.ca/data/en/dataset?page=' + str(i)) rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']), 'parse_dataset')] def parse_dataset(self, response): sel = Selector(response) dataset = items.DatasetItem() dataset['url'] = response.url dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract() dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract() return dataset
from scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import Selector from .. import items class DatasetSpider(CrawlSpider): pages = 9466 name = 'dataset' allowed_domains = ['data.gc.ca'] start_urls = [] for i in range(1, pages + 1): start_urls.append('http://data.gc.ca/data/en/dataset?page=' + str(i)) rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']), 'parse_dataset')] def parse_dataset(self, response): sel = Selector(response) dataset = items.DatasetItem() dataset['url'] = response.url dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract() dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").re('[A-Z]{1}[a-z]+')[0].encode('ascii','ignore') return dataset
Add regex to filter out tab/space/newline
Add regex to filter out tab/space/newline
Python
mit
MaxLikelihood/CODE
--- +++ @@ -21,6 +21,6 @@ dataset = items.DatasetItem() dataset['url'] = response.url dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract() - dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract() + dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").re('[A-Z]{1}[a-z]+')[0].encode('ascii','ignore') return dataset
e24098fed631473671849ec920ebb88345b146bc
fastapp/__init__.py
fastapp/__init__.py
__version__ = "0.7.7" import os from django.core.exceptions import ImproperlyConfigured # load plugins from django.conf import settings try: plugins_config = getattr(settings, "FASTAPP_PLUGINS_CONFIG", {}) plugins = plugins_config.keys() plugins = plugins + getattr(settings, "FASTAPP_PLUGINS", []) for plugin in list(set(plugins)): def my_import(name): # from http://effbot.org/zone/import-string.htm m = __import__(name) for n in name.split(".")[1:]: m = getattr(m, n) return m amod = my_import(plugin) except ImproperlyConfigured, e: print e
__version__ = "0.7.8" import os from django.core.exceptions import ImproperlyConfigured # load plugins from django.conf import settings try: plugins_config = getattr(settings, "FASTAPP_PLUGINS_CONFIG", {}) plugins = plugins_config.keys() plugins = plugins + getattr(settings, "FASTAPP_PLUGINS", []) for plugin in list(set(plugins)): def my_import(name): # from http://effbot.org/zone/import-string.htm m = __import__(name) for n in name.split(".")[1:]: m = getattr(m, n) return m amod = my_import(plugin) except ImproperlyConfigured, e: print e
Update fastapp version to 0.7.8
Update fastapp version to 0.7.8
Python
mit
sahlinet/fastapp,sahlinet/fastapp,sahlinet/fastapp,sahlinet/fastapp
--- +++ @@ -1,4 +1,4 @@ -__version__ = "0.7.7" +__version__ = "0.7.8" import os
92bac498d3ad8f2e49212ce73b7324d661620d63
grako/ast.py
grako/ast.py
from collections import OrderedDict, Mapping import json class AST(Mapping): def __init__(self, **kwargs): self._elements = OrderedDict(**kwargs) def add(self, key, value): previous = self._elements.get(key, None) if previous is None: self._elements[key] = [value] else: previous.append(value) def update(self, *args, **kwargs): for dct in args: for k, v in dct: self.add(k, v) for k, v in kwargs.items(): self.add(k, v) @property def first(self): key = self.elements.keys[0] return self.elements[key] def __iter__(self): return iter(self._elements) def __contains__(self, key): return key in self._elements def __len__(self): return len(self._elements) def __getitem__(self, key): if key not in self._elements: self._elements[key] = list() return self._elements[key] def __getattr__(self, key): return self.__getitem__(key) if key in self._elements: return self.__getitem__(key) raise KeyError(key) @staticmethod def serializable(obj): if isinstance(obj, AST): return obj._elements return obj def __repr__(self): return self.serializable(self._elements) def __str__(self): return json.dumps(self._elements, indent=4, default=self.serializable)
from collections import OrderedDict, Mapping import json __all__ = ['AST'] class AST(Mapping): def __init__(self, **kwargs): self._elements = OrderedDict(**kwargs) def add(self, key, value): previous = self._elements.get(key, None) if previous is None: self._elements[key] = [value] else: previous.append(value) def update(self, *args, **kwargs): for dct in args: for k, v in dct: self.add(k, v) for k, v in kwargs.items(): self.add(k, v) @property def first(self): key = self.elements.keys[0] return self.elements[key] def __iter__(self): return iter(self._elements) def __contains__(self, key): return key in self._elements def __len__(self): return len(self._elements) def __getitem__(self, key): if key not in self._elements: self._elements[key] = list() return self._elements[key] def __setitem__(self, key, value): self._elements[key] = value def __getattr__(self, key): return self.__getitem__(key) if key in self._elements: return self.__getitem__(key) raise KeyError(key) @staticmethod def serializable(obj): if isinstance(obj, AST): return obj._elements return obj def __repr__(self): return self.serializable(self._elements) def __str__(self): return json.dumps(self._elements, indent=4, default=self.serializable)
Allow to set items in AST.
Allow to set items in AST.
Python
bsd-2-clause
swayf/grako,swayf/grako
--- +++ @@ -1,5 +1,7 @@ from collections import OrderedDict, Mapping import json + +__all__ = ['AST'] class AST(Mapping): def __init__(self, **kwargs): @@ -38,6 +40,9 @@ self._elements[key] = list() return self._elements[key] + def __setitem__(self, key, value): + self._elements[key] = value + def __getattr__(self, key): return self.__getitem__(key) if key in self._elements:
5b91e6ce3b66721df9943f996368e7d977a1e1c9
footparse/_utils.py
footparse/_utils.py
import requests class BasePage: def __init__(self, data): self.data = data @classmethod def from_file(cls, path): with open(path) as f: raw = f.read() return cls(raw) @classmethod def from_url(cls, url): res = requests.get(url) return cls(res.text) def int_or_none(blob): try: return int(blob) except ValueError: return None def float_or_none(blob): try: return float(blob) except TypeError: return None
import requests class BasePage: def __init__(self, data): self.data = data @classmethod def from_file(cls, path): with open(path) as f: raw = f.read() return cls(raw) @classmethod def from_url(cls, url): res = requests.get(url) res.raise_for_status() return cls(res.text) def int_or_none(blob): try: return int(blob) except ValueError: return None def float_or_none(blob): try: return float(blob) except TypeError: return None
Raise an exception when request fails.
Raise an exception when request fails. In general, I think that it is safer to raise an exception when an HTTP request used to fetch a page fails.
Python
mit
kickoffai/footparse,kickoffai/footparse
--- +++ @@ -15,6 +15,7 @@ @classmethod def from_url(cls, url): res = requests.get(url) + res.raise_for_status() return cls(res.text)
6d6e0b780c62bea5fec43eae1411db827f13fa17
faker/providers/internet/uk_UA/__init__.py
faker/providers/internet/uk_UA/__init__.py
# coding=utf-8 from __future__ import unicode_literals from .. import Provider as InternetProvider class Provider(InternetProvider): free_email_domains = [ 'email.ua', 'gmail.com', 'gov.ua', 'i.ua', 'meta.ua', 'ukr.net' ] tlds = ['com', 'info', 'net', 'org', 'ua', 'укр']
# coding=utf-8 from __future__ import unicode_literals from .. import Provider as InternetProvider class Provider(InternetProvider): free_email_domains = ( 'email.ua', 'gmail.com', 'gov.ua', 'i.ua', 'meta.ua', 'ukr.net' ) tlds = ('com', 'info', 'net', 'org', 'ua', 'укр') replacements = ( ('А', 'a'), ('Б', 'b'), ('В', 'v'), ('Г', 'h'), ('Ґ', 'g'), ('Д', 'd'), ('Е', 'e'), ('Є', 'ye'), ('Ж', 'zh'), ('З', 'z'), ('И', 'y'), ('І', 'i'), ('Ї', 'yi'), ('Й', 'y'), ('К', 'k'), ('Л', 'l'), ('М', 'm'), ('Н', 'n'), ('О', 'o'), ('П', 'p'), ('Р', 'r'), ('С', 's'), ('Т', 't'), ('У', 'u'), ('Ф', 'f'), ('Х', 'kh'), ('Ц', 'ts'), ('Ч', 'ch'), ('Ш', 'sh'), ('Щ', 'shch'), ('Ь', 'ʹ'), ('Ю', 'yu'), ('Я', 'ya'), ('а', 'a'), ('б', 'b'), ('в', 'v'), ('г', 'h'), ('ґ', 'g'), ('д', 'd'), ('е', 'e'), ('є', 'ie'), ('ж', 'zh'), ('з', 'z'), ('и', 'y'), ('і', 'i'), ('ї', 'i'), ('й', 'i'), ('к', 'k'), ('л', 'l'), ('м', 'm'), ('н', 'n'), ('о', 'o'), ('п', 'p'), ('р', 'r'), ('с', 's'), ('т', 't'), ('у', 'u'), ('ф', 'f'), ('х', 'kh'), ('ц', 'ts'), ('ч', 'ch'), ('ш', 'sh'), ('щ', 'shch'), ('ь', 'ʹ'), ('ю', 'iu'), ('я', 'ia') )
Improve the Ukrainian Internet provider
Improve the Ukrainian Internet provider. Add `replacements`. Replace lists with tuples.
Python
mit
joke2k/faker,trtd/faker,danhuss/faker,joke2k/faker
--- +++ @@ -5,7 +5,23 @@ class Provider(InternetProvider): - free_email_domains = [ + free_email_domains = ( 'email.ua', 'gmail.com', 'gov.ua', 'i.ua', 'meta.ua', 'ukr.net' - ] - tlds = ['com', 'info', 'net', 'org', 'ua', 'укр'] + ) + tlds = ('com', 'info', 'net', 'org', 'ua', 'укр') + + replacements = ( + ('А', 'a'), ('Б', 'b'), ('В', 'v'), ('Г', 'h'), ('Ґ', 'g'), ('Д', 'd'), + ('Е', 'e'), ('Є', 'ye'), ('Ж', 'zh'), ('З', 'z'), ('И', 'y'), + ('І', 'i'), ('Ї', 'yi'), ('Й', 'y'), ('К', 'k'), ('Л', 'l'), + ('М', 'm'), ('Н', 'n'), ('О', 'o'), ('П', 'p'), ('Р', 'r'), ('С', 's'), + ('Т', 't'), ('У', 'u'), ('Ф', 'f'), ('Х', 'kh'), ('Ц', 'ts'), + ('Ч', 'ch'), ('Ш', 'sh'), ('Щ', 'shch'), ('Ь', 'ʹ'), ('Ю', 'yu'), + ('Я', 'ya'), ('а', 'a'), ('б', 'b'), ('в', 'v'), ('г', 'h'), + ('ґ', 'g'), ('д', 'd'), ('е', 'e'), ('є', 'ie'), ('ж', 'zh'), + ('з', 'z'), ('и', 'y'), ('і', 'i'), ('ї', 'i'), ('й', 'i'), + ('к', 'k'), ('л', 'l'), ('м', 'm'), ('н', 'n'), ('о', 'o'), ('п', 'p'), + ('р', 'r'), ('с', 's'), ('т', 't'), ('у', 'u'), ('ф', 'f'), + ('х', 'kh'), ('ц', 'ts'), ('ч', 'ch'), ('ш', 'sh'), ('щ', 'shch'), + ('ь', 'ʹ'), ('ю', 'iu'), ('я', 'ia') + )
364fde2dd6554760ca63c5b16e35222d5482999e
report/report_util.py
report/report_util.py
def compare_ledger_types(account, data, orm): selected_ledger = data['form']['ledger_type'] account_ledgers = [ledger.id for ledger in account.ledger_types] if not selected_ledger: return account_ledgers == [] return selected_ledger in account_ledgers def should_show_account(account, data): if 'account_from' not in data['form'] or 'account_to' not in data['form']: return True low = data['form']['account_from'] high = data['form']['account_to'] return low <= account.code <= high
def compare_ledger_types(account, data, orm): if not hasattr(account, 'ledger_types'): # Ignore this filter when alternate_ledger is not installed. return True selected_ledger = data['form']['ledger_type'] account_ledgers = [ledger.id for ledger in account.ledger_types] if not selected_ledger: return account_ledgers == [] return selected_ledger in account_ledgers def should_show_account(account, data): if 'account_from' not in data['form'] or 'account_to' not in data['form']: return True low = data['form']['account_from'] high = data['form']['account_to'] return low <= account.code <= high
Fix errors when alternate_ledger is not installed
Fix errors when alternate_ledger is not installed
Python
agpl-3.0
lithint/account_report_webkit,xcgd/account_report_webkit,xcgd/account_report_webkit,lithint/account_report_webkit
--- +++ @@ -1,4 +1,8 @@ def compare_ledger_types(account, data, orm): + if not hasattr(account, 'ledger_types'): + # Ignore this filter when alternate_ledger is not installed. + return True + selected_ledger = data['form']['ledger_type'] account_ledgers = [ledger.id for ledger in account.ledger_types]
51de7814ed881a7974f972aafc391584d0c2c517
kuryr/server.py
kuryr/server.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from oslo_log import log from kuryr import app from kuryr.common import config from kuryr import controllers config.init(sys.argv[1:]) controllers.neutron_client() controllers.check_for_neutron_ext_support() controllers.check_for_neutron_ext_tag() app.debug = config.CONF.debug log.setup(config.CONF, 'Kuryr') def start(): port = int(config.CONF.kuryr_uri.split(':')[-1]) app.run("0.0.0.0", port) if __name__ == '__main__': start()
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from oslo_log import log from kuryr import app from kuryr.common import config from kuryr import controllers config.init(sys.argv[1:]) controllers.neutron_client() controllers.check_for_neutron_ext_support() controllers.check_for_neutron_ext_tag() log.setup(config.CONF, 'Kuryr') def start(): port = int(config.CONF.kuryr_uri.split(':')[-1]) app.run("0.0.0.0", port) if __name__ == '__main__': start()
Remove app.debug as we do not use it any more.
Remove app.debug as we do not use it any more. Change-Id: I61497ae95dd304e60240f8ac731e63950351782f Closes-Bug: #1583663
Python
apache-2.0
openstack/kuryr,celebdor/kuryr-libnetwork,celebdor/kuryr-libnetwork,celebdor/kuryr,celebdor/kuryr,celebdor/kuryr-libnetwork,openstack/kuryr
--- +++ @@ -23,7 +23,6 @@ controllers.neutron_client() controllers.check_for_neutron_ext_support() controllers.check_for_neutron_ext_tag() -app.debug = config.CONF.debug log.setup(config.CONF, 'Kuryr')
657d5b1a79811df660857b7488895143fd4106fc
openacademy/model/openacademy_session.py
openacademy/model/openacademy_session.py
# -*- coding: utf-8 -*- from openerp import fields, models class Session(models.Model): _name = 'openacademy.session' name = fields.Char(required=True) start_date = fields.Date() duration = fields.Float(digits=(6,2), help="Duration in days") seats = fields.Integer(string="Number of seats") instructor_id = fields.Many2one('res.partner', string="Instructor") course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True) attendee_ids = fields.Many2many('res.partner', string="Attendees")
# -*- coding: utf-8 -*- from openerp import fields, models class Session(models.Model): _name = 'openacademy.session' name = fields.Char(required=True) start_date = fields.Date() duration = fields.Float(digits=(6,2), help="Duration in days") seats = fields.Integer(string="Number of seats") instructor_id = fields.Many2one('res.partner', string="Instructor", domain=['|',("instructor","=","True"), ("category_id.name", "ilike", "Teacher")]) course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True) attendee_ids = fields.Many2many('res.partner', string="Attendees")
Add domain | & ilike
[REF] openacademy: Add domain | & ilike
Python
apache-2.0
jorgescalona/openacademy-project
--- +++ @@ -9,7 +9,9 @@ start_date = fields.Date() duration = fields.Float(digits=(6,2), help="Duration in days") seats = fields.Integer(string="Number of seats") - instructor_id = fields.Many2one('res.partner', string="Instructor") + instructor_id = fields.Many2one('res.partner', string="Instructor", + domain=['|',("instructor","=","True"), + ("category_id.name", "ilike", "Teacher")]) course_id = fields.Many2one('openacademy.course', ondelete='cascade', string="Course", required=True) attendee_ids = fields.Many2many('res.partner', string="Attendees")
fb986717d5016b1cb3c6b953020ff2aff037b3dc
call_server/extensions.py
call_server/extensions.py
# define flask extensions in separate file, to resolve import dependencies from flask_sqlalchemy import SQLAlchemy db = SQLAlchemy() from flask_caching import Cache cache = Cache() from flask_assets import Environment assets = Environment() from flask_babel import Babel babel = Babel() from flask_mail import Mail mail = Mail() from flask_login import LoginManager login_manager = LoginManager() from flask_restless import APIManager rest = APIManager() from flask_wtf.csrf import CSRFProtect csrf = CSRFProtect() from flask_store import Store store = Store() from flask_rq2 import RQ rq = RQ() from flask_talisman import Talisman CALLPOWER_CSP = { 'default-src':'\'self\'', 'script-src':['\'self\'', '\'unsafe-inline\'', 'cdnjs.cloudflare.com', 'media.twiliocdn.com'], 'style-src': ['\'self\'', '\'unsafe-inline\'', 'fonts.googleapis.com'], 'font-src': ['\'self\'', 'fonts.gstatic.com'], } talisman = Talisman()
# define flask extensions in separate file, to resolve import dependencies from flask_sqlalchemy import SQLAlchemy db = SQLAlchemy() from flask_caching import Cache cache = Cache() from flask_assets import Environment assets = Environment() from flask_babel import Babel babel = Babel() from flask_mail import Mail mail = Mail() from flask_login import LoginManager login_manager = LoginManager() from flask_restless import APIManager rest = APIManager() from flask_wtf.csrf import CSRFProtect csrf = CSRFProtect() from flask_store import Store store = Store() from flask_rq2 import RQ rq = RQ() from flask_talisman import Talisman CALLPOWER_CSP = { 'default-src':'\'self\'', 'script-src':['\'self\'', '\'unsafe-inline\'', '\'unsafe-eval\'', 'cdnjs.cloudflare.com', 'media.twiliocdn.com'], 'style-src': ['\'self\'', '\'unsafe-inline\'', 'fonts.googleapis.com'], 'font-src': ['\'self\'', 'fonts.gstatic.com'], } # unsafe-inline needed to render <script> tags without nonce # unsafe-eval needed to run bootstrap templates talisman = Talisman()
Include script-src unsafe-eval to allow underscore templating. Long term, we should pre-compile with webpack to avoid needing this.
Include script-src unsafe-eval to allow underscore templating. Long term, we should pre-compile with webpack to avoid needing this.
Python
agpl-3.0
OpenSourceActivismTech/call-power,spacedogXYZ/call-power,18mr/call-congress,spacedogXYZ/call-power,spacedogXYZ/call-power,18mr/call-congress,OpenSourceActivismTech/call-power,OpenSourceActivismTech/call-power,spacedogXYZ/call-power,OpenSourceActivismTech/call-power,18mr/call-congress,18mr/call-congress
--- +++ @@ -33,8 +33,10 @@ from flask_talisman import Talisman CALLPOWER_CSP = { 'default-src':'\'self\'', - 'script-src':['\'self\'', '\'unsafe-inline\'', 'cdnjs.cloudflare.com', 'media.twiliocdn.com'], + 'script-src':['\'self\'', '\'unsafe-inline\'', '\'unsafe-eval\'', 'cdnjs.cloudflare.com', 'media.twiliocdn.com'], 'style-src': ['\'self\'', '\'unsafe-inline\'', 'fonts.googleapis.com'], 'font-src': ['\'self\'', 'fonts.gstatic.com'], } +# unsafe-inline needed to render <script> tags without nonce +# unsafe-eval needed to run bootstrap templates talisman = Talisman()
3ec333d8fc1ad7136b4324476001ed2279977356
pyfibot/util/twitter_application_auth.py
pyfibot/util/twitter_application_auth.py
import requests import base64 import sys if len(sys.argv) < 3: print "Usage: twitter_application_auth.py <consumer key> <consumer secret>" sys.exit(1) consumer_key = sys.argv[1] consumer_secret = sys.argv[2] token = consumer_key + ":" + consumer_secret encoded_token = base64.b64encode(token) payload = {'grant_type': 'client_credentials'} headers = {'Authorization': 'Basic ' + encoded_token} auth_url = "https://api.twitter.com/oauth2/token" r = requests.post(auth_url, payload, headers=headers) try: bearer_token = r.json()['access_token'] except TypeError: bearer_token = r.json['access_token'] print "Bearer token:" print bearer_token
import requests import base64 import sys if len(sys.argv) < 3: print "Usage: twitter_application_auth.py <consumer key> <consumer secret>" sys.exit(1) consumer_key = sys.argv[1] consumer_secret = sys.argv[2] token = consumer_key + ":" + consumer_secret encoded_token = base64.b64encode(token) payload = {'grant_type': 'client_credentials'} headers = {'Authorization': 'Basic ' + encoded_token} auth_url = "https://api.twitter.com/oauth2/token" r = requests.post(auth_url, payload, headers=headers) try: bearer_token = r.json()['access_token'] except TypeError: bearer_token = r.json['access_token'] print "Paste the following to your config below module_urltitle" print "twitter_bearer: '%s'" % bearer_token
Improve the instructions on twitter application auth
Improve the instructions on twitter application auth
Python
bsd-3-clause
aapa/pyfibot,lepinkainen/pyfibot,aapa/pyfibot,lepinkainen/pyfibot
--- +++ @@ -20,5 +20,5 @@ except TypeError: bearer_token = r.json['access_token'] -print "Bearer token:" -print bearer_token +print "Paste the following to your config below module_urltitle" +print "twitter_bearer: '%s'" % bearer_token
46cd1dad595aeba0e238a88de9485b1bcbfdab57
txircd/modules/cmode_s.py
txircd/modules/cmode_s.py
from txircd.modbase import Mode class SecretMode(Mode): def listOutput(self, command, data): if command != "LIST": return data cdata = data["cdata"] if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels: data["cdata"].clear() # other +s stuff is hiding in other modules. class Spawner(object): def __init__(self, ircd): self.ircd = ircd self.mode_s = None def spawn(self): self.mode_s = SecretMode() return { "modes": { "cns": self.mode_s }, "actions": { "commandextra": [self.mode_s.listOutput] } } def cleanup(self): self.ircd.removeMode("cns") self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)
from twisted.words.protocols import irc from txircd.modbase import Mode class SecretMode(Mode): def checkPermission(self, user, cmd, data): if cmd != "NAMES": return data remove = [] for chan in data["targetchan"]: if "p" in chan.mode and chan.name not in user.channels: user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel") remove.append(chan) for chan in remove: data["targetchan"].remove(chan) return data def listOutput(self, command, data): if command != "LIST": return data cdata = data["cdata"] if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels: data["cdata"].clear() # other +s stuff is hiding in other modules. class Spawner(object): def __init__(self, ircd): self.ircd = ircd self.mode_s = None def spawn(self): self.mode_s = SecretMode() return { "modes": { "cns": self.mode_s }, "actions": { "commandextra": [self.mode_s.listOutput] } } def cleanup(self): self.ircd.removeMode("cns") self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)
Hide secret channels from /NAMES users
Hide secret channels from /NAMES users
Python
bsd-3-clause
Heufneutje/txircd,ElementalAlchemist/txircd,DesertBus/txircd
--- +++ @@ -1,6 +1,19 @@ +from twisted.words.protocols import irc from txircd.modbase import Mode class SecretMode(Mode): + def checkPermission(self, user, cmd, data): + if cmd != "NAMES": + return data + remove = [] + for chan in data["targetchan"]: + if "p" in chan.mode and chan.name not in user.channels: + user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel") + remove.append(chan) + for chan in remove: + data["targetchan"].remove(chan) + return data + def listOutput(self, command, data): if command != "LIST": return data
3c0ce6a3e4e16ff3991a838009c42efa2f5b237d
tviit/admin.py
tviit/admin.py
from django.contrib import admin from .models import Tviit admin.site.register(Tviit)
from django.contrib import admin from .models import Tviit class TviitAdmin(admin.ModelAdmin): readonly_fields=('uuid',) admin.site.register(Tviit, TviitAdmin)
Add uuid to be readable in Admin-panel
Add uuid to be readable in Admin-panel
Python
mit
DeWaster/Tviserrys,DeWaster/Tviserrys
--- +++ @@ -1,4 +1,7 @@ from django.contrib import admin from .models import Tviit -admin.site.register(Tviit) +class TviitAdmin(admin.ModelAdmin): + readonly_fields=('uuid',) + +admin.site.register(Tviit, TviitAdmin)
ab8af0f34c468103a092f2c9d751c6c51c5522f1
bioconda_utils/__init__.py
bioconda_utils/__init__.py
""" Bioconda Utilities Package .. rubric:: Subpackages .. autosummary:: :toctree: bioconda_utils.bot bioconda_utils.lint .. rubric:: Submodules .. autosummary:: :toctree: aiopipe bioconductor_skeleton build circleci cli cran_skeleton docker_utils githandler github_integration githubhandler gitter graph hosters pkg_test recipe sphinxext autobump update_pinnings upload utils """ from ._version import get_versions __version__ = get_versions()["version"] del get_versions
""" Bioconda Utilities Package .. rubric:: Subpackages .. autosummary:: :toctree: bioconda_utils.bot bioconda_utils.lint .. rubric:: Submodules .. autosummary:: :toctree: aiopipe bioconductor_skeleton build circleci cli cran_skeleton docker_utils githandler githubhandler gitter graph hosters pkg_test recipe sphinxext autobump update_pinnings upload utils """ from ._version import get_versions __version__ = get_versions()["version"] del get_versions
Fix leftover docs hook for github_integration module
Fix leftover docs hook for github_integration module
Python
mit
bioconda/bioconda-utils,bioconda/bioconda-utils,bioconda/bioconda-utils
--- +++ @@ -22,7 +22,6 @@ cran_skeleton docker_utils githandler - github_integration githubhandler gitter graph
575462ca4cf9f5345939026ce5571bdc7e8277ad
bonobo/execution/plugin.py
bonobo/execution/plugin.py
from bonobo.execution.base import LoopingExecutionContext, recoverable class PluginExecutionContext(LoopingExecutionContext): PERIOD = 0.5 def __init__(self, wrapped, parent): # Instanciate plugin. This is not yet considered stable, as at some point we may need a way to configure # plugins, for example if it depends on an external service. super().__init__(wrapped(self), parent) def start(self): super().start() with recoverable(self.handle_error): self.wrapped.initialize() def shutdown(self): with recoverable(self.handle_error): self.wrapped.finalize() self.alive = False def step(self): with recoverable(self.handle_error): self.wrapped.run()
from bonobo.execution.base import LoopingExecutionContext, recoverable class PluginExecutionContext(LoopingExecutionContext): PERIOD = 0.5 def __init__(self, wrapped, parent): # Instanciate plugin. This is not yet considered stable, as at some point we may need a way to configure # plugins, for example if it depends on an external service. super().__init__(wrapped(self), parent) def start(self): super().start() with recoverable(self.handle_error): self.wrapped.initialize() def shutdown(self): if self.started: with recoverable(self.handle_error): self.wrapped.finalize() self.alive = False def step(self): with recoverable(self.handle_error): self.wrapped.run()
Check if PluginExecutionContext was started before shutting it down.
Check if PluginExecutionContext was started before shutting it down. If `PluginExecutionContext().shutdown()` is called _before_ `PluginExecutionContext().start()` was called, this leads to an `AttributeError` exception since the finalizer tries to access attributes which were never defined.
Python
apache-2.0
hartym/bonobo,python-bonobo/bonobo,hartym/bonobo,python-bonobo/bonobo,python-bonobo/bonobo,hartym/bonobo
---
+++
@@ -16,8 +16,9 @@
             self.wrapped.initialize()
 
     def shutdown(self):
-        with recoverable(self.handle_error):
-            self.wrapped.finalize()
+        if self.started:
+            with recoverable(self.handle_error):
+                self.wrapped.finalize()
         self.alive = False
 
     def step(self):
d7bce814c10ce13cf4c228fd87dcbdee75f8d0a1
integration-test/1211-fix-null-network.py
integration-test/1211-fix-null-network.py
from . import OsmFixtureTest


class FixNullNetwork(OsmFixtureTest):
    def test_routes_with_no_network(self):
        # ref="N 4", route=road, but no network=*
        # so we should get something that has no network, but a shield text of
        # '4'
        self.load_fixtures(['http://www.openstreetmap.org/relation/2307408'])

        self.assert_has_feature(
            11, 1038, 705, 'roads',
            {'kind': 'major_road', 'shield_text': '4', 'network': type(None)})
from . import OsmFixtureTest


class FixNullNetwork(OsmFixtureTest):
    def test_routes_with_no_network(self):
        # ref="N 4", route=road, but no network=*
        # so we should get something that has no network, but a shield text of
        # '4'
        self.load_fixtures(
            ['http://www.openstreetmap.org/relation/2307408'],
            clip=self.tile_bbox(11, 1038, 705))

        self.assert_has_feature(
            11, 1038, 705, 'roads',
            {'kind': 'major_road', 'shield_text': '4', 'network': type(None)})
Add clip to reduce fixture size.
Add clip to reduce fixture size.
Python
mit
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
---
+++
@@ -6,7 +6,9 @@
         # ref="N 4", route=road, but no network=*
         # so we should get something that has no network, but a shield text of
         # '4'
-        self.load_fixtures(['http://www.openstreetmap.org/relation/2307408'])
+        self.load_fixtures(
+            ['http://www.openstreetmap.org/relation/2307408'],
+            clip=self.tile_bbox(11, 1038, 705))
 
         self.assert_has_feature(
             11, 1038, 705, 'roads',
e3d1c8bbf238516d7a10e03aea0fbd378c4a4f6f
profile_collection/startup/99-bluesky.py
profile_collection/startup/99-bluesky.py
def detselect(detector_object, suffix="_stats_total1"):
    """Switch the active detector and set some internal state"""
    gs.DETS =[detector_object]
    gs.PLOT_Y = detector_object.name + suffix
    gs.TABLE_COLS = [gs.PLOT_Y]
def detselect(detector_object, suffix="_stats_total1"):
    """Switch the active detector and set some internal state"""
    gs.DETS =[detector_object]
    gs.PLOT_Y = detector_object.name + suffix
    gs.TABLE_COLS = [gs.PLOT_Y]

def chx_plot_motor(scan):
    fig = None
    if gs.PLOTMODE == 1:
        fig = plt.gcf()
    elif gs.PLOTMODE == 2:
        fig = plt.gcf()
        fig.clear()
    elif gs.PLOTMODE == 3:
        fig = plt.figure()
    return LivePlot(gs.PLOT_Y, scan.motor._name, fig=fig)

dscan.default_sub_factories['all'][1] = chx_plot_motor

gs.PLOTMODE = 1

from bluesky.global_state import resume, abort, stop, panic, all_is_well, state
Add 'better' plotting control for live plots
ENH: Add 'better' plotting control for live plots
Python
bsd-2-clause
NSLS-II-CHX/ipython_ophyd,NSLS-II-CHX/ipython_ophyd
---
+++
@@ -5,3 +5,19 @@
     gs.PLOT_Y = detector_object.name + suffix
     gs.TABLE_COLS = [gs.PLOT_Y]
 
+def chx_plot_motor(scan):
+    fig = None
+    if gs.PLOTMODE == 1:
+        fig = plt.gcf()
+    elif gs.PLOTMODE == 2:
+        fig = plt.gcf()
+        fig.clear()
+    elif gs.PLOTMODE == 3:
+        fig = plt.figure()
+    return LivePlot(gs.PLOT_Y, scan.motor._name, fig=fig)
+
+dscan.default_sub_factories['all'][1] = chx_plot_motor
+
+gs.PLOTMODE = 1
+
+from bluesky.global_state import resume, abort, stop, panic, all_is_well, state
c64d35346ed8d7ae5b08bc8d5eb37f0c827da9f4
jesusmtnez/python/kata/tests/test_game.py
jesusmtnez/python/kata/tests/test_game.py
import unittest from game import Game class BowlingGameTest(unittest.TestCase): def setUp(self): self.g = Game() def tearDown(self): self.g = None def _roll_many(self, n, pins): "Roll 'n' times a roll of 'pins' pins" for i in range(n): self.g.roll(pins) def test_gutter_game(self): self._roll_many(20, 0) self.assertEqual(0, self.g.score()) def test_all_ones(self): self._roll_many(20, 1) self.assertEqual(20, self.g.score()) def test_one_spare(self): self.g.roll(5) self.g.roll(5) self.g.roll(3) self._roll_many(17, 0) self.assertEqual(16, self.g.score()) if __name__ == '__main__': unittest.main()
import unittest from game import Game class BowlingGameTest(unittest.TestCase): def setUp(self): self.g = Game() def tearDown(self): self.g = None def _roll_many(self, n, pins): "Roll 'n' times a roll of 'pins' pins" for i in range(n): self.g.roll(pins) def _roll_spare(self): "Roll a spare" self.g.roll(5) self.g.roll(5) def test_gutter_game(self): self._roll_many(20, 0) self.assertEqual(0, self.g.score()) def test_all_ones(self): self._roll_many(20, 1) self.assertEqual(20, self.g.score()) def test_one_spare(self): self._roll_spare() self.g.roll(3) self._roll_many(17, 0) self.assertEqual(16, self.g.score()) if __name__ == '__main__': unittest.main()
Refactor rolling a spare in tests
[Python] Refactor rolling a spare in tests
Python
mit
JesusMtnez/devexperto-challenge,JesusMtnez/devexperto-challenge
---
+++
@@ -14,6 +14,11 @@
         for i in range(n):
             self.g.roll(pins)
 
+    def _roll_spare(self):
+        "Roll a spare"
+        self.g.roll(5)
+        self.g.roll(5)
+
     def test_gutter_game(self):
         self._roll_many(20, 0)
         self.assertEqual(0, self.g.score())
@@ -23,8 +28,7 @@
         self.assertEqual(20, self.g.score())
 
     def test_one_spare(self):
-        self.g.roll(5)
-        self.g.roll(5)
+        self._roll_spare()
         self.g.roll(3)
         self._roll_many(17, 0)
         self.assertEqual(16, self.g.score())
dced68096d5c84c831866cf92e7430df6cf5f477
src/nodeconductor_saltstack/sharepoint/cost_tracking.py
src/nodeconductor_saltstack/sharepoint/cost_tracking.py
from django.contrib.contenttypes.models import ContentType from nodeconductor.cost_tracking import CostTrackingBackend from nodeconductor.cost_tracking.models import DefaultPriceListItem from .models import SharepointTenant class Type(object): USAGE = 'usage' STORAGE = 'storage' STORAGE_KEY = '1 MB' USAGE_KEY = 'basic' CHOICES = { USAGE: USAGE_KEY, STORAGE: STORAGE_KEY, } class SaltStackCostTrackingBackend(CostTrackingBackend): NUMERICAL = [Type.STORAGE] @classmethod def get_default_price_list_items(cls): content_type = ContentType.objects.get_for_model(SharepointTenant) for item, key in Type.CHOICES.iteritems(): yield DefaultPriceListItem(item_type=item, key=key, resource_content_type=content_type) @classmethod def get_used_items(cls, resource): backend = resource.get_backend() storage = sum(s.usage for s in backend.site_collections.list()) return [ (Type.USAGE, Type.CHOICES[Type.USAGE], 1), (Type.STORAGE, Type.CHOICES[Type.STORAGE], storage), ]
from django.contrib.contenttypes.models import ContentType from nodeconductor.cost_tracking import CostTrackingBackend from nodeconductor.cost_tracking.models import DefaultPriceListItem from .models import SharepointTenant class Type(object): USAGE = 'usage' STORAGE = 'storage' STORAGE_KEY = '1 MB' USAGE_KEY = 'basic' CHOICES = { USAGE: USAGE_KEY, STORAGE: STORAGE_KEY, } class SaltStackCostTrackingBackend(CostTrackingBackend): NUMERICAL = [Type.STORAGE] @classmethod def get_default_price_list_items(cls): content_type = ContentType.objects.get_for_model(SharepointTenant) for item, key in Type.CHOICES.iteritems(): yield DefaultPriceListItem(item_type=item, key=key, resource_content_type=content_type) @classmethod def get_used_items(cls, resource): tenant = resource storage = tenant.quotas.get(name=SharepointTenant.Quotas.storage).usage return [ (Type.USAGE, Type.CHOICES[Type.USAGE], 1), (Type.STORAGE, Type.CHOICES[Type.STORAGE], storage), ]
Fix sharepoint tenant cost tracking
Fix sharepoint tenant cost tracking - itacloud-6123
Python
mit
opennode/nodeconductor-saltstack
---
+++
@@ -30,8 +30,8 @@
 
     @classmethod
     def get_used_items(cls, resource):
-        backend = resource.get_backend()
-        storage = sum(s.usage for s in backend.site_collections.list())
+        tenant = resource
+        storage = tenant.quotas.get(name=SharepointTenant.Quotas.storage).usage
         return [
             (Type.USAGE, Type.CHOICES[Type.USAGE], 1),
             (Type.STORAGE, Type.CHOICES[Type.STORAGE], storage),
29fb910695eef3b5d10ffbb914b2d605ae7bf4e2
setup.py
setup.py
"""Package setup configuration. To Install package, run: >>> python setup.py install To install package with a symlink, so that changes to the source files will be immediately available, run: >>> python setup.py develop """ from __future__ import print_function from setuptools import setup, find_packages __version__ = '0.1' setup( name='mutabletuple.mutabletuple', version=__version__, description='Mutable named tuple that behave like dict with fixed keys.', long_description=open('README.rst').read() + '\n' + open('CHANGES.rst').read(), url='https://bitbucket.org/nicolas_bessou/mutabletuple', include_package_data=True, author='Nicolas BESSOU', author_email='nicolas.bessou@gmail.com', license='MIT', packages=find_packages(), install_requires=['namedlist'], classifiers=['Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', ], test_suite='mutabletuple.tests', )
"""Package setup configuration. To Install package, run: >>> python setup.py install To install package with a symlink, so that changes to the source files will be immediately available, run: >>> python setup.py develop """ from __future__ import print_function from setuptools import setup, find_packages __version__ = '0.1' setup( name='mutabletuple.mutabletuple', version=__version__, description='Mutable named tuple that behave like dict with fixed keys.', long_description=open('README.rst').read() + '\n' + open('CHANGES.rst').read(), url='https://github.com/nicolasbessou/mutabletuple', include_package_data=True, author='Nicolas BESSOU', author_email='nicolas.bessou@gmail.com', license='MIT', packages=find_packages(), install_requires=['namedlist'], classifiers=['Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', ], test_suite='mutabletuple.tests', )
Change the URL where the repository is hosted.
Change the URL where the repository is hosted.
Python
mit
nicolasbessou/mutabletuple
---
+++
@@ -17,7 +17,7 @@
     version=__version__,
     description='Mutable named tuple that behave like dict with fixed keys.',
     long_description=open('README.rst').read() + '\n' + open('CHANGES.rst').read(),
-    url='https://bitbucket.org/nicolas_bessou/mutabletuple',
+    url='https://github.com/nicolasbessou/mutabletuple',
     include_package_data=True,
     author='Nicolas BESSOU',
     author_email='nicolas.bessou@gmail.com',
1aa2fb37239bb1f7045ad93b5362e90a12216517
setup.py
setup.py
#!/usr/bin/python # -*- coding: utf-8 -*- from setuptools import setup with open('README.rst') as stream: long_description = stream.read().decode('utf-8') setup( name='pyudev', version='0.3', url='http://packages.python.org/pyudev', author='Sebastian Wiesner', author_email='lunaryorn@googlemail.com', description='A libudev binding', long_description=long_description, platforms='Linux', license='MIT/X11', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries', 'Topic :: System :: Hardware', 'Topic :: System :: Operating System Kernels :: Linux', ], py_modules=['udev', '_udev', 'qudev'], )
#!/usr/bin/python # -*- coding: utf-8 -*- from setuptools import setup import udev with open('README.rst') as stream: long_description = stream.read().decode('utf-8') setup( name='pyudev', version=udev.__version__, url='http://packages.python.org/pyudev', author='Sebastian Wiesner', author_email='lunaryorn@googlemail.com', description='A libudev binding', long_description=long_description, platforms='Linux', license='MIT/X11', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries', 'Topic :: System :: Hardware', 'Topic :: System :: Operating System Kernels :: Linux', ], py_modules=['udev', '_udev', 'qudev'], )
Use version from udev module instead of declaring it manually
Use version from udev module instead of declaring it manually
Python
lgpl-2.1
mulkieran/pyudev,mulkieran/pyudev,deepakkapoor624/pyudev,deepakkapoor624/pyudev,pyudev/pyudev,mulkieran/pyudev
---
+++
@@ -4,6 +4,7 @@
 
 from setuptools import setup
 
+import udev
 
 with open('README.rst') as stream:
     long_description = stream.read().decode('utf-8')
@@ -11,7 +12,7 @@
 
 setup(
    name='pyudev',
-    version='0.3',
+    version=udev.__version__,
     url='http://packages.python.org/pyudev',
     author='Sebastian Wiesner',
     author_email='lunaryorn@googlemail.com',
d0ba20520b69bf561ef71255c526b34c5fd0d2be
setup.py
setup.py
# /setup.py # # Installation and setup script for parse-shebang # # See /LICENCE.md for Copyright information """Installation and setup script for parse-shebang.""" from setuptools import find_packages, setup setup(name="parse-shebang", version="0.0.12", description="""Parse shebangs and return their components.""", long_description_markdown_filename="README.md", author="Sam Spilsbury", author_email="smspillaz@gmail.com", classifiers=["Development Status :: 3 - Alpha", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Intended Audience :: Developers", "Topic :: System :: Shells", "Topic :: Utilities", "License :: OSI Approved :: MIT License"], url="http://github.com/polysquare/parse-shebang", license="MIT", keywords="development", packages=find_packages(exclude=["test"]), install_requires=["setuptools"], extras_require={ "upload": ["setuptools-markdown"] }, test_suite="nose.collector", zip_safe=True, include_package_data=True)
# /setup.py # # Installation and setup script for parse-shebang # # See /LICENCE.md for Copyright information """Installation and setup script for parse-shebang.""" from setuptools import find_packages, setup setup(name="parse-shebang", version="0.0.13", description="""Parse shebangs and return their components.""", long_description_markdown_filename="README.md", author="Sam Spilsbury", author_email="smspillaz@gmail.com", classifiers=["Development Status :: 3 - Alpha", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Intended Audience :: Developers", "Topic :: System :: Shells", "Topic :: Utilities", "License :: OSI Approved :: MIT License"], url="http://github.com/polysquare/parse-shebang", license="MIT", keywords="development", packages=find_packages(exclude=["test"]), install_requires=["setuptools"], extras_require={ "upload": ["setuptools-markdown"] }, test_suite="nose.collector", zip_safe=True, include_package_data=True)
Bump version: 0.0.12 -> 0.0.13
Bump version: 0.0.12 -> 0.0.13 [ci skip]
Python
mit
polysquare/python-parse-shebang
---
+++
@@ -8,7 +8,7 @@
 from setuptools import find_packages, setup
 
 setup(name="parse-shebang",
-      version="0.0.12",
+      version="0.0.13",
       description="""Parse shebangs and return their components.""",
       long_description_markdown_filename="README.md",
       author="Sam Spilsbury",
691e25dbc258d94a80a729924d76aa10a693c08e
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages from dreambeam import __version__ setup(name='dreamBeam', version=__version__, description='Measurement equation framework for radio interferometry.', author='Tobia D. Carozzi', author_email='tobia.carozzi@chalmers.se', packages=find_packages(), package_data={'dreambeam.telescopes.LOFAR': ['share/*.cc', 'share/simmos/*.cfg', 'share/alignment/*.txt', 'data/*teldat.p']}, license='ISC', classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: ISC License', 'Programming Language :: Python :: 2.7', 'Topic :: Scientific/Engineering :: Astronomy', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Visualization' ], install_requires=[ 'numpy>=1.10', 'python-casacore', 'matplotlib>2.0', 'antpat>=0.4' ], entry_points={ 'console_scripts': [ 'pointing_jones = scripts.pointing_jones:cli_main', 'FoV_jones = scripts.FoV_jones:main' ] } )
#!/usr/bin/env python from setuptools import setup, find_packages from dreambeam import __version__ setup(name='dreamBeam', version=__version__, description='Measurement equation framework for radio interferometry.', author='Tobia D. Carozzi', author_email='tobia.carozzi@chalmers.se', packages=find_packages(), package_data={'dreambeam.telescopes.LOFAR': ['share/*.cc', 'share/simmos/*.cfg', 'share/alignment/*.txt', 'data/*teldat.p'], 'dreambeam': ['configs/*.txt']}, include_package_data=True, license='ISC', classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: ISC License', 'Programming Language :: Python :: 2.7', 'Topic :: Scientific/Engineering :: Astronomy', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Visualization' ], install_requires=[ 'numpy>=1.10', 'python-casacore', 'matplotlib>2.0', 'antpat>=0.4' ], entry_points={ 'console_scripts': [ 'pointing_jones = scripts.pointing_jones:cli_main', 'FoV_jones = scripts.FoV_jones:main' ] } )
Include config files in the pip install
Include config files in the pip install
Python
isc
2baOrNot2ba/dreamBeam,2baOrNot2ba/dreamBeam
---
+++
@@ -11,7 +11,10 @@
       packages=find_packages(),
       package_data={'dreambeam.telescopes.LOFAR': ['share/*.cc',
                                                    'share/simmos/*.cfg',
-                                                   'share/alignment/*.txt', 'data/*teldat.p']},
+                                                   'share/alignment/*.txt', 'data/*teldat.p'],
+                    'dreambeam':
+                        ['configs/*.txt']},
+      include_package_data=True,
       license='ISC',
       classifiers=[
           'Development Status :: 1 - Planning',
cca830c874741df7456449a6901d367a48d79392
setup.py
setup.py
from setuptools import setup, find_packages install_requires = [ 'dill==0.2.5', 'easydict==1.6', 'h5py==2.6.0', 'jsonpickle==0.9.3', 'Keras==1.2.0', 'nflgame==1.2.20', 'numpy==1.11.2', 'pandas==0.19.1', 'scikit-learn==0.18.1', 'scipy==0.18.1', 'tensorflow==0.12.0rc1', 'Theano==0.8.2', 'tabulate==0.7.7', ] with open('README.md', 'r') as f: readme = f.read() setup( name="wincast", version='0.0.8', url='https://github.com/kahnjw/wincast', author_email='jarrod.kahn+wincast@gmail.com', long_description=readme, license='MIT', packages=find_packages(exclude=['tests', 'tests.*']), install_requires=install_requires, package_data={ 'wincast': ['models/wincast.clf.pkl', 'models/wincast.scaler.pkl'] } )
from setuptools import setup, find_packages install_requires = [ 'dill==0.2.5', 'easydict==1.6', 'h5py==2.6.0', 'jsonpickle==0.9.3', 'Keras==1.2.0', 'nflgame==1.2.20', 'numpy==1.11.2', 'pandas==0.19.1', 'scikit-learn==0.18.1', 'scipy==0.18.1', 'tensorflow==0.12.0rc1', 'Theano==0.8.2', 'tabulate==0.7.7', ] setup( name="wincast", version='0.0.8', url='https://github.com/kahnjw/wincast', author_email='jarrod.kahn+wincast@gmail.com', license='MIT', packages=find_packages(exclude=['tests', 'tests.*']), install_requires=install_requires, package_data={ 'wincast': ['models/wincast.clf.pkl', 'models/wincast.scaler.pkl'] } )
Remove README from pypi descriptioj
Remove README from pypi descriptioj
Python
mit
kahnjw/wincast
---
+++
@@ -17,15 +17,11 @@
     'tabulate==0.7.7',
 ]
 
-with open('README.md', 'r') as f:
-    readme = f.read()
-
 setup(
     name="wincast",
     version='0.0.8',
     url='https://github.com/kahnjw/wincast',
     author_email='jarrod.kahn+wincast@gmail.com',
-    long_description=readme,
     license='MIT',
     packages=find_packages(exclude=['tests', 'tests.*']),
     install_requires=install_requires,
9f744a84300bf9f7d88db952863005b4765528fd
setup.py
setup.py
from setuptools import setup, find_packages import sys, os here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.rst')).read() except IOError: README = '' version = "0.0.1" setup(name='backlash', version=version, description="standalone version of the Werkzeug Debugger based on WebOb", long_description=README, classifiers=['Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', 'Topic :: Internet :: WWW/HTTP :: WSGI'], keywords='wsgi', author='Alessandro Molina', author_email='amol@turbogears.org', url='', license='MIT', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ "WebOb" # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
from setuptools import setup, find_packages import sys, os here = os.path.abspath(os.path.dirname(__file__)) try: README = open(os.path.join(here, 'README.rst')).read() except IOError: README = '' version = "0.0.1a" setup(name='backlash', version=version, description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2", long_description=README, classifiers=['Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', 'Topic :: Internet :: WWW/HTTP :: WSGI'], keywords='wsgi', author='Alessandro Molina', author_email='amol@turbogears.org', url='', license='MIT', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=False, install_requires=[ "WebOb" # -*- Extra requirements: -*- ], entry_points=""" # -*- Entry points: -*- """, )
Prepare for making alpha release
Prepare for making alpha release
Python
mit
TurboGears/backlash,TurboGears/backlash
---
+++
@@ -7,11 +7,11 @@
 except IOError:
     README = ''
 
-version = "0.0.1"
+version = "0.0.1a"
 
 setup(name='backlash',
       version=version,
-      description="standalone version of the Werkzeug Debugger based on WebOb",
+      description="Standalone WebOb port of the Werkzeug Debugger with Python3 support meant to replace WebError in future TurboGears2",
       long_description=README,
       classifiers=['Intended Audience :: Developers',
                    'License :: OSI Approved :: MIT License',
e78b832fe1ce10a682be836c0752c0674087b5c4
setup.py
setup.py
#!/usr/bin/env python """Pygme: Python Gaussian ModElling - a python implementation of the Multi-Gaussian Expansion Method. Fit MGE models, and Generate initial conditions for N body simulations See Monnet et al. 1992 and Emsellem et al. 1994 for more details """ ## Distribution for the PyMGE package import sys # simple hack to allow use of "python setup.py develop". Should not affect # users, only developers. if 'develop' in sys.argv: # use setuptools for develop, but nothing else from setuptools import setup else: from distutils.core import setup import os if os.path.exists('MANIFEST'): os.remove('MANIFEST') setup(name='pygme', version='0.0.4', description='PYthon Gaussian ModElling - Python MGE Tool', author='Eric Emsellem', author_email='eric.emsellem@eso.org', maintainer='Eric Emsellem', # url='http://', # requires=['pymodelfit'], # requires=['openopt'], license='LICENSE', packages=['pygme', 'pygme.binning', 'pygme.astroprofiles', 'pygme.fitting', 'pygme.utils', 'pygme.colormaps'], package_dir={'pygme.astroprofiles': 'pygme/astroprofiles'}, package_data={'pygme.astroprofiles': ['data/*.dat']}, )
#!/usr/bin/env python """Pygme: Python Gaussian ModElling - a python implementation of the Multi-Gaussian Expansion Method. Fit MGE models, and Generate initial conditions for N body simulations See Monnet et al. 1992 and Emsellem et al. 1994 for more details """ ## Distribution for the PyMGE package import sys # simple hack to allow use of "python setup.py develop". Should not affect # users, only developers. if 'develop' in sys.argv: # use setuptools for develop, but nothing else from setuptools import setup else: from distutils.core import setup import os if os.path.exists('MANIFEST'): os.remove('MANIFEST') setup(name='pygme', version='0.1.0', description='PYthon Gaussian ModElling - Python MGE Tool', author='Eric Emsellem', author_email='eric.emsellem@eso.org', maintainer='Eric Emsellem', # url='http://', # requires=['pymodelfit'], # requires=['openopt'], license='LICENSE', packages=['pygme', 'pygme.binning', 'pygme.astroprofiles', 'pygme.fitting', 'pygme.utils', 'pygme.colormaps'], package_dir={'pygme.astroprofiles': 'pygme/astroprofiles'}, package_data={'pygme.astroprofiles': ['data/*.dat']}, )
Introduce PCC transformation plus adding new plotting options
Introduce PCC transformation plus adding new plotting options
Python
bsd-3-clause
emsellem/pygme
---
+++
@@ -19,7 +19,7 @@
 
 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
 setup(name='pygme',
-      version='0.0.4',
+      version='0.1.0',
       description='PYthon Gaussian ModElling - Python MGE Tool',
       author='Eric Emsellem',
       author_email='eric.emsellem@eso.org',
08347214e93d9cfe9ad53a528aea3428afeb49a0
setup.py
setup.py
from setuptools import setup setup( name='pyretry', version="0.9", description='Separate your retry logic from your business logic', author='Bob Renwick', author_email='bob.renwick@gmail.com', url='https://github.com/bobbyrenwick/pyretry', packages=['pyretry'], classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Natural Language :: English', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', ), )
from setuptools import setup setup( name='pyretry', version="0.9", description='Separate your retry logic from your business logic', author='Bob Renwick', author_email='bob.renwick@gmail.com', url='https://github.com/bobbyrenwick/pyretry', packages=['pyretry'], tests_require=[ 'mock>=1.0,<1.1', ], classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Natural Language :: English', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', ), )
Add missing `tests_require` for `mock`
Add missing `tests_require` for `mock`
Python
mit
bobbyrenwick/pyretry
---
+++
@@ -9,6 +9,9 @@
     author_email='bob.renwick@gmail.com',
     url='https://github.com/bobbyrenwick/pyretry',
     packages=['pyretry'],
+    tests_require=[
+        'mock>=1.0,<1.1',
+    ],
     classifiers=(
         'Development Status :: 5 - Production/Stable',
         'Intended Audience :: Developers',
fe9a7c9bb597460062fdf9987b98a3a085b2ad48
setup.py
setup.py
#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.3", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", ], install_requires = [ "requests", ], )
#!/usr/bin/env python import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file: readme = readme_file.read() setup( name = "outpan", version = "0.1.3", description = "Easily use Outpan.com API to get product info from their barcode", long_description = readme, py_modules = ["outpan"], author = "Bertrand Vidal", author_email = "vidal.bertrand@gmail.com", download_url = "https://pypi.python.org/pypi/outpan", url = "https://github.com/bertrandvidal/outpan_api", classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", ], install_requires = [ "requests", "parse_this", ], )
Make it possible to parse command line arguments directly
Make it possible to parse command line arguments directly Using parse_this awesome module we can create the whole parser needed to use the OutpanAPI class from the command line
Python
mit
bertrandvidal/outpan_api
---
+++
@@ -24,5 +24,6 @@
     ],
     install_requires = [
         "requests",
+        "parse_this",
     ],
 )
aba7dce93a4e13bba9b6fed6b31bf2d370fd54bc
setup.py
setup.py
from setuptools import setup from setuptools.command.test import test as TestCommand from setuptools import setup from hbite import VERSION as version class Tox(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): #import here, cause outside the eggs aren't loaded import tox errno = tox.cmdline(self.test_args) sys.exit(errno) install_requires = [ 'protobuf==2.5.0', 'protobuf-socket-rpc==1.3' ] test_requires = [ 'tox' ] setup( name='hbite', version=version, url='https://github.com/ravwojdyla/hbite', author='Rafal Wojdyla', author_email='ravwojdyla@gmail.com', description='A high-level Python library for Hadoop RPCs', packages=['hbite'], install_requires=install_requires, tests_require=test_requires, cmdclass={'test': Tox} )
import sys from setuptools.command.test import test as TestCommand from setuptools import setup from hbite import VERSION as version class Tox(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): # import here, cause outside the eggs aren't loaded import tox errno = tox.cmdline(self.test_args) sys.exit(errno) install_requires = [ 'protobuf==2.5.0', 'protobuf-socket-rpc==1.3' ] test_requires = [ 'tox' ] setup( name='hbite', version=version, url='https://github.com/ravwojdyla/hbite', author='Rafal Wojdyla', author_email='ravwojdyla@gmail.com', description='A high-level Python library for Hadoop RPCs', packages=['hbite'], install_requires=install_requires, tests_require=test_requires, cmdclass={'test': Tox} )
Fix flake8 and missing sys module
Fix flake8 and missing sys module
Python
apache-2.0
ravwojdyla/hbite,ravwojdyla/hbite
---
+++
@@ -1,4 +1,4 @@
-from setuptools import setup
+import sys
 from setuptools.command.test import test as TestCommand
 from setuptools import setup
 
@@ -13,7 +13,7 @@
         self.test_suite = True
 
     def run_tests(self):
-        #import here, cause outside the eggs aren't loaded
+        # import here, cause outside the eggs aren't loaded
         import tox
         errno = tox.cmdline(self.test_args)
         sys.exit(errno)
259dab25fb2aceb3b5fa229eb41d3eacf8f1c71c
nanomon/scheduler/__init__.py
nanomon/scheduler/__init__.py
import base64 import json import Queue import time import logging from boto import sns from boto import sqs from boto.sqs.message import Message, RawMessage from nanomon.utils import yaml_includes from nanomon.message import NanoMessage from nanomon.queue import QueueWorker from nanomon.queue.backends.sns_sqs import SQSQueue, SNSTopic logger = logging.getLogger(__name__) class YamlNodeBackend(object): def __init__(self, path): self.path = path def get_nodes(self): logger.debug("Loading node config from %s" % (self.path)) return yaml_includes.load_config(self.path) class Scheduler(QueueWorker): def __init__(self, node_backend, topic, queue): self.node_backend = node_backend super(Scheduler, self).__init__(topic, queue) def run(self, sleep=300): while True: start = time.time() sleep = float(sleep) nodes = self.node_backend.get_nodes() for node, settings in nodes.iteritems(): task = json.dumps({node: settings}) logger.debug("Sending task for node '%s'." % (node)) self.send_task(task) real_sleep = sleep - (time.time() - start) if real_sleep <= 0: continue logger.debug("Sleeping for %.02f." % (real_sleep)) time.sleep(real_sleep)
import base64 import json import Queue import time import logging from boto import sns from boto import sqs from boto.sqs.message import Message, RawMessage from nanomon.utils import yaml_includes from nanomon.message import NanoMessage from nanomon.queue import QueueWorker from nanomon.queue.backends.sns_sqs import SQSQueue, SNSTopic logger = logging.getLogger(__name__) class YamlNodeBackend(object): def __init__(self, path): self.path = path def get_nodes(self): logger.debug("Loading node config from %s" % (self.path)) return yaml_includes.load_config(self.path) class Scheduler(QueueWorker): def __init__(self, node_backend, topic, queue): self.node_backend = node_backend super(Scheduler, self).__init__(topic, queue) def run(self, sleep=300): while True: start = time.time() sleep = float(sleep) nodes = self.node_backend.get_nodes() for node in nodes: task = json.dumps(node) logger.debug("Sending task for node '%s'." % (node['name'])) self.send_task(task) real_sleep = sleep - (time.time() - start) if real_sleep <= 0: continue logger.debug("Sleeping for %.02f." % (real_sleep)) time.sleep(real_sleep)
Update for new node definition syntax
Update for new node definition syntax
Python
bsd-2-clause
cloudtools/nymms
---
+++
@@ -35,9 +35,9 @@
             start = time.time()
             sleep = float(sleep)
             nodes = self.node_backend.get_nodes()
-            for node, settings in nodes.iteritems():
-                task = json.dumps({node: settings})
-                logger.debug("Sending task for node '%s'." % (node))
+            for node in nodes:
+                task = json.dumps(node)
+                logger.debug("Sending task for node '%s'." % (node['name']))
                 self.send_task(task)
             real_sleep = sleep - (time.time() - start)
             if real_sleep <= 0:
c22873e2f213ff81e9bd44b37aeb9d66418c5b90
setup.py
setup.py
#!/usr/bin/env python """setup.py for get_user_headers""" __author__ = "Stephan Sokolow (deitarion/SSokolow)" __license__ = "MIT" import sys if __name__ == '__main__' and 'flake8' not in sys.modules: # FIXME: Why does this segfault flake8 under PyPy? from setuptools import setup setup( name="get_user_headers", version="0.1.1", description="Helper for retrieving identifying headers from the user's" "default browser", long_description="""A self-contained script which allows your script to retrieve headers like User-Agent from the user's preferred browser to ensure that requests from your (hopefully well-behaved) script don't stick out like sore thumbs for overzealous site admins to block without cause.""", author="Stephan Sokolow", author_email="http://www.ssokolow.com/ContactMe", # No spam harvesting url="https://github.com/ssokolow/get_user_headers", license="MIT", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Topic :: Internet :: WWW/HTTP", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords="http web bot spider automation", py_modules=['get_user_headers'], zip_safe=True )
#!/usr/bin/env python """setup.py for get_user_headers""" __author__ = "Stephan Sokolow (deitarion/SSokolow)" __license__ = "MIT" import sys if __name__ == '__main__' and 'flake8' not in sys.modules: # FIXME: Why does this segfault flake8 under PyPy? from setuptools import setup setup( name="get_user_headers", version="0.1.1", description="Helper for retrieving identifying headers from the user's" "default browser", long_description="""A self-contained module with no extra dependencies which allows your script to retrieve headers like User-Agent from the user's preferred browser to ensure that requests from your (hopefully well-behaved) script don't stick out like sore thumbs for overzealous site admins to block without cause.""", author="Stephan Sokolow", author_email="http://www.ssokolow.com/ContactMe", # No spam harvesting url="https://github.com/ssokolow/get_user_headers", license="MIT", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Topic :: Internet :: WWW/HTTP", "Topic :: Software Development :: Libraries :: Python Modules", ], keywords="http web bot spider automation", py_modules=['get_user_headers'], zip_safe=True )
Adjust phrasing of PyPI long_description
Adjust phrasing of PyPI long_description
Python
mit
ssokolow/get_user_headers
---
+++
@@ -15,10 +15,11 @@
     version="0.1.1",
     description="Helper for retrieving identifying headers from the user's"
                 "default browser",
-    long_description="""A self-contained script which allows your script to
-retrieve headers like User-Agent from the user's preferred browser to ensure
-that requests from your (hopefully well-behaved) script don't stick out like
-sore thumbs for overzealous site admins to block without cause.""",
+    long_description="""A self-contained module with no extra dependencies
+which allows your script to retrieve headers like User-Agent from the user's
+preferred browser to ensure that requests from your (hopefully well-behaved)
+script don't stick out like sore thumbs for overzealous site admins to block
+without cause.""",
     author="Stephan Sokolow",
     author_email="http://www.ssokolow.com/ContactMe", # No spam harvesting
     url="https://github.com/ssokolow/get_user_headers",
38349308a5848883a58257c080f3563c806c3db0
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages import materializecssform setup( name='django-materializecss-form', version=materializecssform.__version__, packages=find_packages(), author="Florent CLAPIÉ", author_email="clapie.florent@gmail.com", description="A simple Django form template tag to work with Materializecss", long_description=open('README.rst').read(), # install_requires= , include_package_data=True, url='https://github.com/florent1933/django-materializecss-form', classifiers=[ "Programming Language :: Python", "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2.7", "Topic :: Documentation :: Sphinx", ], license="MIT", )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages import materializecssform setup( name='django-materializecss-form', version=materializecssform.__version__, packages=find_packages(), author="Florent CLAPIÉ", author_email="clapie.florent@gmail.com", description="A simple Django form template tag to work with Materializecss", long_description=open('README.rst').read(), # install_requires= , include_package_data=True, url='https://github.com/florent1933/django-materializecss-form', classifiers=[ "Programming Language :: Python", "Development Status :: 3 - Alpha", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2.7", "Topic :: Documentation :: Sphinx", ], license="MIT", zip_safe=False )
Fix template loading when installed with easy_install
Fix template loading when installed with easy_install If you install a package using `easy_install` or `setup.py install`, it will default to installing as a zipped egg. Django is by default unable to find templates inside zipped archives and even with an egg template loader enabled it's very slow as it involves unpacking the archive when the template is needed. Setting `zip_safe=False` forces `setuptools` to always install the package uncompressed.
Python
mit
florent1933/django-materializecss-form,florent1933/django-materializecss-form
---
+++
@@ -40,4 +40,6 @@
     ],
 
     license="MIT",
+
+    zip_safe=False
 )
134bcc0a0f1fefb7afbcef1e54dfa7c1581f9193
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup setup( name='django-longform', version='0.1.1', description='A Django application for longform blogging.', author='Martey Dodoo', author_email='django-longform@marteydodoo.com', url='https://github.com/martey/django-longform', license='MIT', long_description=open('README').read(), classifiers=[], install_requires=[ 'django', 'django-markdown', 'django-taggit', 'markdown', ], )
#!/usr/bin/env python from setuptools import setup setup( name='django-longform', version='0.1.2', description='A Django application for longform blogging.', author='Martey Dodoo', author_email='django-longform@marteydodoo.com', url='https://github.com/martey/django-longform', license='MIT', packages=['longform'], long_description=open('README').read(), classifiers=[], install_requires=[ 'django', 'django-markdown', 'django-taggit', 'markdown', ], )
Make sure `longform` dir is properly installed.
Make sure `longform` dir is properly installed. I am officially *the worst* at packaging.
Python
mit
martey/django-longform
---
+++
@@ -4,12 +4,13 @@
 
 setup(
     name='django-longform',
-    version='0.1.1',
+    version='0.1.2',
     description='A Django application for longform blogging.',
     author='Martey Dodoo',
     author_email='django-longform@marteydodoo.com',
     url='https://github.com/martey/django-longform',
     license='MIT',
+    packages=['longform'],
     long_description=open('README').read(),
     classifiers=[],
     install_requires=[
e118e493122726239c0f610b5da1c2b9c0765cdb
setup.py
setup.py
from setuptools import setup, find_packages

setup(
    name='Matador',
    version='0.0.1',
    author='Owen Campbell',
    author_email='owen.campbell@empiria.co.uk',
    entry_points={
        'console_scripts': [
            'matador = core.management:hello',
        ],
    },
    url='http://www.empiria.co.uk',
    packages=find_packages(),
    license='The MIT License (MIT)',
    description='Change management for Agresso systems',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: Console',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English'
    ]
)
from setuptools import setup, find_packages

setup(
    name='Matador',
    version='0.0.1',
    author='Owen Campbell',
    author_email='owen.campbell@empiria.co.uk',
    entry_points={
        'console_scripts': [
            'matador = core.management:execute_command',
        ],
    },
    url='http://www.empiria.co.uk',
    packages=find_packages(),
    license='The MIT License (MIT)',
    description='Change management for Agresso systems',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: Console',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English'
    ]
)
Add correct function to script endpoint
Add correct function to script endpoint
Python
mit
Empiria/matador
---
+++
@@ -7,7 +7,7 @@
     author_email='owen.campbell@empiria.co.uk',
     entry_points={
         'console_scripts': [
-            'matador = core.management:hello',
+            'matador = core.management:execute_command',
         ],
     },
     url='http://www.empiria.co.uk',
ba9c55206b814d580965fb305c23fdb01c99af7d
setup.py
setup.py
from distutils.core import setup
setup(
  name = 'dp2ppgen',
  packages = ['dp2ppgen'], # this must be the same as the name above
  version = '0.1',
  description = 'Translates pgdp.org formatted text files into ppgen syntax.',
  author = 'David Maranhao',
  author_email = 'david.maranhao@gmail.com',
  url = 'https://github.com/davem2/dp2ppgen', # use the URL to the github repo
  download_url = 'https://github.com/davem2/dp2ppgen/tarball/0.1', # I'll explain this in a second
  keywords = ['text', 'processing', 'books', 'gutenberg', 'distributedproofers'], # arbitrary keywords
  classifiers = [],
)
from distutils.core import setup
setup(
  name = 'dp2ppgen',
  packages = ['dp2ppgen'], # this must be the same as the name above
  version = '0.1',
  description = 'Translates pgdp.org formatted text files into ppgen syntax.',
  author = 'David Maranhao',
  author_email = 'david.maranhao@gmail.com',
  url = 'https://github.com/davem2/dp2ppgen', # use the URL to the github repo
  download_url = 'https://github.com/davem2/dp2ppgen/tarball/0.1', # I'll explain this in a second
  keywords = ['text', 'processing', 'book', 'gutenberg', 'distributedproofreaders'], # arbitrary keywords
  classifiers = [],
)
Update keywords, add 0.1 tag
Update keywords, add 0.1 tag
Python
mit
davem2/dp2ppgen
---
+++
@@ -8,6 +8,6 @@
   author_email = 'david.maranhao@gmail.com',
   url = 'https://github.com/davem2/dp2ppgen', # use the URL to the github repo
   download_url = 'https://github.com/davem2/dp2ppgen/tarball/0.1', # I'll explain this in a second
-  keywords = ['text', 'processing', 'books', 'gutenberg', 'distributedproofers'], # arbitrary keywords
+  keywords = ['text', 'processing', 'book', 'gutenberg', 'distributedproofreaders'], # arbitrary keywords
   classifiers = [],
 )
d7688948f05366e278f67d72646a05f7985d4cfa
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # # Copyright 2012 ShopWiki # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from setuptools import setup VERSION = '0.0.0' DESCRIPTION = 'Web Authentication with SQLAlchemy' setup( name='Clortho', version=VERSION, description=DESCRIPTION, author='Patrick Lawson', license='Apache 2', author_email='plawson@shopwiki.com', url='http://github.com/shopwiki/clortho', packages=['clortho', 'clortho.tests'], install_requires=['sqlalchemy', 'py-bcrypt', 'pysqlite'], tests_require=['nose'], classifiers = [ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', ] )
#!/usr/bin/env python # -*- coding: utf-8 -*- # # # Copyright 2012 ShopWiki # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from setuptools import setup VERSION = '0.0.0' DESCRIPTION = 'Web Authentication with SQLAlchemy' setup( name='Clortho', version=VERSION, description=DESCRIPTION, author='Patrick Lawson', license='Apache 2', author_email='plawson@shopwiki.com', url='http://github.com/shopwiki/clortho', packages=['clortho', 'clortho.tests'], install_requires=['sqlalchemy', 'py-bcrypt', 'pysqlite'], tests_require=['nose', 'sqlalchemy', 'py-bcrypt', 'pysqlite'], classifiers = [ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', ] )
Add all used packages to tests_requires (for Readthedocs)
Add all used packages to tests_requires (for Readthedocs)
Python
apache-2.0
patricklaw/clortho
---
+++
@@ -32,7 +32,7 @@
     url='http://github.com/shopwiki/clortho',
     packages=['clortho', 'clortho.tests'],
     install_requires=['sqlalchemy', 'py-bcrypt', 'pysqlite'],
-    tests_require=['nose'],
+    tests_require=['nose', 'sqlalchemy', 'py-bcrypt', 'pysqlite'],
     classifiers = [
         'Development Status :: 2 - Pre-Alpha',
         'Intended Audience :: Developers',
f7982e2194c096d0020426a02543fea70c69dce7
tasks.py
tasks.py
import datetime from google.cloud import storage from invoke import task import os BUCKET_NAME = 'lekcije' @task def backup_mysql(ctx): mysqldump = os.getenv('MYSQLDUMP') if not mysqldump: mysqldump = './bin/mysqldump' user = os.getenv('MYSQL_USER') password = os.getenv('MYSQL_PASSWORD') host = os.getenv('MYSQL_HOST') port = os.getenv('MYSQL_PORT') database = os.getenv('MYSQL_DATABASE') dump_file = 'lekcije_' + datetime.datetime.now().strftime('%Y%m%d') + '.dump.bz2' ctx.run('{mysqldump} -u{user} -p{password} -h{host} -P{port} --no-tablespaces {database} | bzip2 -9 > {dump_file}'.format(**locals())) client = storage.Client() bucket = client.get_bucket(BUCKET_NAME) new_blob = bucket.blob('backup/' + dump_file) new_blob.upload_from_filename(dump_file) delete_date = (datetime.datetime.now() - datetime.timedelta(days=7)).strftime('%Y%m%d') delete_blob_name = 'backup/lekcije_' + delete_date + '.dump.bz2' delete_blob = bucket.get_blob(delete_blob_name) if delete_blob: delete_blob.delete()
import datetime from google.cloud import storage from invoke import task import os BUCKET_NAME = 'lekcije' @task def backup_mysql(ctx): mysqldump = os.getenv('MYSQLDUMP') if not mysqldump: mysqldump = './bin/mysqldump' user = os.getenv('MYSQL_USER') password = os.getenv('MYSQL_PASSWORD') host = os.getenv('MYSQL_HOST') port = os.getenv('MYSQL_PORT') database = os.getenv('MYSQL_DATABASE') dump_file = 'lekcije_' + datetime.datetime.now().strftime('%Y%m%d') + '.dump.bz2' ctx.run('{mysqldump} -u{user} -p{password} -h{host} -P{port} --no-tablespaces --quick {database} | bzip2 -9 > {dump_file}'.format(**locals())) client = storage.Client() bucket = client.get_bucket(BUCKET_NAME) new_blob = bucket.blob('backup/' + dump_file) new_blob.upload_from_filename(dump_file) delete_date = (datetime.datetime.now() - datetime.timedelta(days=7)).strftime('%Y%m%d') delete_blob_name = 'backup/lekcije_' + delete_date + '.dump.bz2' delete_blob = bucket.get_blob(delete_blob_name) if delete_blob: delete_blob.delete()
Add --quick option to mysqldump
Add --quick option to mysqldump
Python
mit
oinume/lekcije,oinume/lekcije,oinume/lekcije,oinume/lekcije,oinume/lekcije,oinume/lekcije
---
+++
@@ -17,7 +17,7 @@
     port = os.getenv('MYSQL_PORT')
     database = os.getenv('MYSQL_DATABASE')
     dump_file = 'lekcije_' + datetime.datetime.now().strftime('%Y%m%d') + '.dump.bz2'
-    ctx.run('{mysqldump} -u{user} -p{password} -h{host} -P{port} --no-tablespaces {database} | bzip2 -9 > {dump_file}'.format(**locals()))
+    ctx.run('{mysqldump} -u{user} -p{password} -h{host} -P{port} --no-tablespaces --quick {database} | bzip2 -9 > {dump_file}'.format(**locals()))
 
     client = storage.Client()
     bucket = client.get_bucket(BUCKET_NAME)
893540d492b731b93a31f3c5158c99f4db9fc3e4
tasks.py
tasks.py
import urlparse import requests def purge_fastly_tags(domain, api_key, service_id, tags, max_tries=25): session = requests.session() headers = {"X-Fastly-Key": api_key, "Accept": "application/json"} all_tags = set(tags) purges = {} count = 0 while all_tags and not count > max_tries: try: for tag in set(all_tags): # Build the URL url_path = "/service/%s/purge/%s" % (service_id, tag) url = urlparse.urljoin(domain, url_path) # Issue the Purge resp = session.post(url, headers=headers) resp.raise_for_status() # Store the Purge ID so we can track it later purges[tag] = resp.json()["id"] # for tag, purge_id in purges.iteritems(): # # Ensure that the purge completed successfully # url = urlparse.urljoin(domain, "/purge") # status = session.get(url, params={"id": purge_id}) # status.raise_for_status() # # If the purge completely successfully remove the tag from # # our list. # if status.json().get("results", {}).get("complete", None): # all_tags.remove(tag) except Exception: if count > max_tries: raise
import urlparse import requests def purge_fastly_tags(domain, api_key, service_id, tags, max_tries=25): session = requests.session() headers = {"X-Fastly-Key": api_key, "Accept": "application/json"} all_tags = set(tags) purges = {} count = 0 while all_tags and not count > max_tries: count += 1 try: for tag in set(all_tags): # Build the URL url_path = "/service/%s/purge/%s" % (service_id, tag) url = urlparse.urljoin(domain, url_path) # Issue the Purge resp = session.post(url, headers=headers) resp.raise_for_status() # Store the Purge ID so we can track it later purges[tag] = resp.json()["id"] # for tag, purge_id in purges.iteritems(): # # Ensure that the purge completed successfully # url = urlparse.urljoin(domain, "/purge") # status = session.get(url, params={"id": purge_id}) # status.raise_for_status() # # If the purge completely successfully remove the tag from # # our list. # if status.json().get("results", {}).get("complete", None): # all_tags.remove(tag) except Exception: if count > max_tries: raise
Increase the count so we don't spin forever
Increase the count so we don't spin forever
Python
bsd-3-clause
pydotorg/pypi,pydotorg/pypi,pydotorg/pypi,pydotorg/pypi
---
+++
@@ -13,6 +13,8 @@
     count = 0
 
     while all_tags and not count > max_tries:
+        count += 1
+
         try:
             for tag in set(all_tags):
                 # Build the URL
e817f726c20ccf40cd43d4e6cf36235187a27c20
objects/utils.py
objects/utils.py
"""Utils module.""" from inspect import isclass from .errors import Error def is_provider(instance): """Check if instance is provider instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_PROVIDER__')) def ensure_is_provider(instance): """Check if instance is provider instance, otherwise raise and error.""" if not is_provider(instance): raise Error('Expected provider instance, ' 'got {0}'.format(str(instance))) return instance def is_injection(instance): """Check if instance is injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_INJECTION__')) def is_init_arg_injection(instance): """Check if instance is init arg injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_INIT_ARG_INJECTION__')) def is_attribute_injection(instance): """Check if instance is attribute injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_ATTRIBUTE_INJECTION__')) def is_method_injection(instance): """Check if instance is method injection instance.""" return (not isclass(instance) and hasattr(instance, '__IS_OBJECTS_METHOD_INJECTION__'))
"""Utils module.""" from six import class_types from .errors import Error def is_provider(instance): """Check if instance is provider instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_PROVIDER__')) def ensure_is_provider(instance): """Check if instance is provider instance, otherwise raise and error.""" if not is_provider(instance): raise Error('Expected provider instance, ' 'got {0}'.format(str(instance))) return instance def is_injection(instance): """Check if instance is injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INJECTION__')) def is_init_arg_injection(instance): """Check if instance is init arg injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_INIT_ARG_INJECTION__')) def is_attribute_injection(instance): """Check if instance is attribute injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_ATTRIBUTE_INJECTION__')) def is_method_injection(instance): """Check if instance is method injection instance.""" return (not isinstance(instance, class_types) and hasattr(instance, '__IS_OBJECTS_METHOD_INJECTION__'))
Fix of bug in Python 2.6 with failed isclass check in inspect module
Fix of bug in Python 2.6 with failed isclass check in inspect module
Python
bsd-3-clause
rmk135/dependency_injector,ets-labs/dependency_injector,ets-labs/python-dependency-injector,rmk135/objects
---
+++
@@ -1,13 +1,13 @@
 """Utils module."""
 
-from inspect import isclass
+from six import class_types
 
 from .errors import Error
 
 
 def is_provider(instance):
     """Check if instance is provider instance."""
-    return (not isclass(instance) and
+    return (not isinstance(instance, class_types) and
             hasattr(instance, '__IS_OBJECTS_PROVIDER__'))
 
 
@@ -21,23 +21,23 @@
 
 def is_injection(instance):
     """Check if instance is injection instance."""
-    return (not isclass(instance) and
+    return (not isinstance(instance, class_types) and
             hasattr(instance, '__IS_OBJECTS_INJECTION__'))
 
 
 def is_init_arg_injection(instance):
     """Check if instance is init arg injection instance."""
-    return (not isclass(instance) and
+    return (not isinstance(instance, class_types) and
             hasattr(instance, '__IS_OBJECTS_INIT_ARG_INJECTION__'))
 
 
 def is_attribute_injection(instance):
     """Check if instance is attribute injection instance."""
-    return (not isclass(instance) and
+    return (not isinstance(instance, class_types) and
             hasattr(instance, '__IS_OBJECTS_ATTRIBUTE_INJECTION__'))
 
 
 def is_method_injection(instance):
     """Check if instance is method injection instance."""
-    return (not isclass(instance) and
+    return (not isinstance(instance, class_types) and
             hasattr(instance, '__IS_OBJECTS_METHOD_INJECTION__'))