commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
f1506a36ab4d9970fb7661aaf2a0f7da05812cec
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=[],
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.rst').read(),
install_requires=[
"pip",
"requests",
"setuptools",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
|
import os
from setuptools import setup, find_packages
setup(
name="mnp",
version="1.0.0",
author="Heryandi",
author_email="heryandi@gmail.com",
packages=find_packages(exclude="test"),
scripts=[],
url="https://github.com/heryandi/mnp",
license="MIT",
description="Tools to manage Mininet package",
long_description=open("README.rst").read(),
install_requires=[
"pip",
"requests",
"setuptools",
],
entry_points={"console_scripts": [
"mnp = mnp:main",
]},
classifiers=[
"Mininet :: Tool",
],
keywords="command-line commandline mininet package packaging tool"
)
|
Add classifier and keyword to metadata
|
Add classifier and keyword to metadata
|
Python
|
mit
|
heryandi/mnp
|
---
+++
@@ -3,22 +3,26 @@
from setuptools import setup, find_packages
setup(
- name='mnp',
- version='1.0.0',
- author='Heryandi',
- author_email='heryandi@gmail.com',
- packages=find_packages(exclude='test'),
+ name="mnp",
+ version="1.0.0",
+ author="Heryandi",
+ author_email="heryandi@gmail.com",
+ packages=find_packages(exclude="test"),
scripts=[],
- url='https://github.com/heryandi/mnp',
- license='MIT',
- description='Wrapper tools to manage Mininet package',
- long_description=open('README.rst').read(),
+ url="https://github.com/heryandi/mnp",
+ license="MIT",
+ description="Tools to manage Mininet package",
+ long_description=open("README.rst").read(),
install_requires=[
"pip",
"requests",
"setuptools",
],
- entry_points={'console_scripts': [
- 'mnp = mnp:main',
- ]}
+ entry_points={"console_scripts": [
+ "mnp = mnp:main",
+ ]},
+ classifiers=[
+ "Mininet :: Tool",
+ ],
+ keywords="command-line commandline mininet package packaging tool"
)
|
b22c7b78586ee07d686d06ccfd6213ca35afef1b
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='slacker',
version='0.9.29',
packages=['slacker'],
description='Slack API client',
author='Oktay Sancak',
author_email='oktaysancak@gmail.com',
url='http://github.com/os/slacker/',
install_requires=['requests >= 2.2.1'],
license='http://www.apache.org/licenses/LICENSE-2.0',
test_suite='tests',
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
),
keywords='slack api'
)
|
from setuptools import setup
setup(
name='slacker',
version='0.9.30',
packages=['slacker'],
description='Slack API client',
author='Oktay Sancak',
author_email='oktaysancak@gmail.com',
url='http://github.com/os/slacker/',
install_requires=['requests >= 2.2.1'],
license='http://www.apache.org/licenses/LICENSE-2.0',
test_suite='tests',
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
),
keywords='slack api'
)
|
Set version number to 0.9.30
|
Set version number to 0.9.30
|
Python
|
apache-2.0
|
os/slacker
|
---
+++
@@ -3,7 +3,7 @@
setup(
name='slacker',
- version='0.9.29',
+ version='0.9.30',
packages=['slacker'],
description='Slack API client',
author='Oktay Sancak',
|
e7e519f1d1636240022b7961fe949bc41a97c091
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
import shutil
import sys
VERSION = '0.0.0git'
setup_info = dict(
name = 'nimp',
version = VERSION,
author = 'Dontnod Entertainment',
description = 'DNE build tool',
packages = [
'nimp',
'nimp/commands',
'nimp',
],
install_requires = [
'glob2',
],
entry_points = {
'console_scripts' : [ 'nimp = nimp.nimp_cli:main' ],
},
)
setuptools_info = dict(
zip_safe = True,
)
from setuptools import setup
setup(**setup_info)
|
#!/usr/bin/env python
import os
import shutil
import sys
VERSION = '0.0.1'
setup_info = dict(
name = 'nimp-cli',
version = VERSION,
author = 'Dontnod Entertainment',
description = 'Multipurpose build tool',
packages = [
'nimp',
'nimp/commands',
'nimp',
],
install_requires = [
'glob2',
],
entry_points = {
'console_scripts' : [ 'nimp = nimp.nimp_cli:main' ],
},
)
setuptools_info = dict(
zip_safe = True,
)
from setuptools import setup
setup(**setup_info)
|
Call the package nimp-cli because nimp is already taken.
|
Call the package nimp-cli because nimp is already taken.
|
Python
|
mit
|
dontnod/nimp
|
---
+++
@@ -4,14 +4,14 @@
import shutil
import sys
-VERSION = '0.0.0git'
+VERSION = '0.0.1'
setup_info = dict(
- name = 'nimp',
+ name = 'nimp-cli',
version = VERSION,
author = 'Dontnod Entertainment',
- description = 'DNE build tool',
+ description = 'Multipurpose build tool',
packages = [
'nimp',
|
03cb3e001a25467319d0d82a5fc95e1c07ea3dd4
|
setup.py
|
setup.py
|
from distutils.core import setup
import multi_schema
setup(
name = "django-multi-schema",
version = multi_schema.__version__,
description = "Postgres schema support in django.",
url = "http://hg.schinckel.net/django-multi-schema",
author = "Matthew Schinckel",
author_email = "matt@schinckel.net",
packages = [
"multi_schema",
],
classifiers = [
'Programming Language :: Python',
'Operating System :: OS Independent',
'Framework :: Django',
],
)
|
from distutils.core import setup
import multi_schema
setup(
name = "django-multi-schema",
version = multi_schema.__version__,
description = "Postgres schema support in django.",
url = "http://hg.schinckel.net/django-multi-schema",
author = "Matthew Schinckel",
author_email = "matt@schinckel.net",
packages = [
"multi_schema",
],
install_requires = [
'south==0.7.4',
],
classifiers = [
'Programming Language :: Python',
'Operating System :: OS Independent',
'Framework :: Django',
],
)
|
Add south as a dependency, so we can apply a version. Does not need to be installed in INSTALLED_APPS.
|
Add south as a dependency, so we can apply a version.
Does not need to be installed in INSTALLED_APPS.
|
Python
|
bsd-3-clause
|
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
|
---
+++
@@ -11,6 +11,9 @@
packages = [
"multi_schema",
],
+ install_requires = [
+ 'south==0.7.4',
+ ],
classifiers = [
'Programming Language :: Python',
'Operating System :: OS Independent',
|
f97e5585386e7e9417689cceff1bf49386473551
|
setup.py
|
setup.py
|
from ez_setup import use_setuptools # https://pypi.python.org/pypi/setuptools
use_setuptools()
from setuptools import setup, find_packages
from packager import __version__
# Get the long description from the README file.
def get_long_description():
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
except:
return []
else:
return long_description
setup(
name='packagebuilder',
version=__version__,
description='Tools for building rpm and deb packages for CSDMS software',
long_description=get_long_description(),
url='https://github.com/csdms/packagebuilder',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='CSDMS, earth system modeling, packaging, Linux, rpm, deb',
packages=find_packages(),
install_requires=['nose'],
package_data={
'packager': ['repositories.txt'],
},
entry_points={
'console_scripts': [
'build_rpm=packager.rpm.build:main',
],
},
)
|
from ez_setup import use_setuptools # https://pypi.python.org/pypi/setuptools
use_setuptools()
from setuptools import setup, find_packages
from packager import __version__
# Get the long description from the README file.
def get_long_description():
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
except:
return []
else:
return long_description
setup(
name='packagebuilder',
version=__version__,
description='Tools for building rpm and deb packages for CSDMS software',
long_description=get_long_description(),
url='https://github.com/csdms/packagebuilder',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='CSDMS, earth system modeling, packaging, Linux, rpm, deb',
packages=find_packages(exclude=['*test']),
install_requires=['nose'],
package_data={
'packager': ['repositories.txt'],
},
entry_points={
'console_scripts': [
'build_rpm=packager.rpm.build:main',
],
},
)
|
Exclude tests from set of installed packages
|
Exclude tests from set of installed packages
|
Python
|
mit
|
csdms/packagebuilder
|
---
+++
@@ -34,7 +34,7 @@
'Programming Language :: Python :: 2.7',
],
keywords='CSDMS, earth system modeling, packaging, Linux, rpm, deb',
- packages=find_packages(),
+ packages=find_packages(exclude=['*test']),
install_requires=['nose'],
package_data={
'packager': ['repositories.txt'],
|
f0579373b2c3261b00228d4c30832f51d713a7c2
|
setup.py
|
setup.py
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-oidc-provider',
version='0.0.4',
packages=[
'oidc_provider', 'oidc_provider/lib', 'oidc_provider/lib/endpoints',
'oidc_provider/lib/utils', 'oidc_provider/tests',
],
include_package_data=True,
license='MIT License',
description='OpenID Connect Provider implementation for Django.',
long_description=README,
url='http://github.com/juanifioren/django-oidc-provider',
author='Juan Ignacio Fiorentino',
author_email='juanifioren@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=[
'pyjwt==1.1.0',
],
)
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-oidc-provider',
version='0.0.4',
packages=[
'oidc_provider', 'oidc_provider/lib', 'oidc_provider/lib/endpoints',
'oidc_provider/lib/utils', 'oidc_provider/tests', 'oidc_provider/migrations',
],
include_package_data=True,
license='MIT License',
description='OpenID Connect Provider implementation for Django.',
long_description=README,
url='http://github.com/juanifioren/django-oidc-provider',
author='Juan Ignacio Fiorentino',
author_email='juanifioren@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=[
'pyjwt==1.1.0',
],
)
|
Add migrations folder to build.
|
Add migrations folder to build.
|
Python
|
mit
|
django-py/django-openid-provider,nmohoric/django-oidc-provider,wojtek-fliposports/django-oidc-provider,wayward710/django-oidc-provider,nmohoric/django-oidc-provider,ByteInternet/django-oidc-provider,bunnyinc/django-oidc-provider,ByteInternet/django-oidc-provider,juanifioren/django-oidc-provider,torreco/django-oidc-provider,Sjord/django-oidc-provider,torreco/django-oidc-provider,django-py/django-openid-provider,juanifioren/django-oidc-provider,Sjord/django-oidc-provider,bunnyinc/django-oidc-provider,wayward710/django-oidc-provider,wojtek-fliposports/django-oidc-provider
|
---
+++
@@ -12,7 +12,7 @@
version='0.0.4',
packages=[
'oidc_provider', 'oidc_provider/lib', 'oidc_provider/lib/endpoints',
- 'oidc_provider/lib/utils', 'oidc_provider/tests',
+ 'oidc_provider/lib/utils', 'oidc_provider/tests', 'oidc_provider/migrations',
],
include_package_data=True,
license='MIT License',
|
c1955ceeb08f960cef631e6a812862052d42d85c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='blanc-contentfiles',
version='0.2.1',
description='Blanc Content Files',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-contentfiles',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
|
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='blanc-contentfiles',
version='0.2.1',
description='Blanc Content Files',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-contentfiles',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
|
Update contact details to the company
|
Update contact details to the company
|
Python
|
bsd-3-clause
|
blancltd/blanc-contentfiles
|
---
+++
@@ -7,8 +7,8 @@
description='Blanc Content Files',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-contentfiles',
- maintainer='Alex Tomkins',
- maintainer_email='alex@blanc.ltd.uk',
+ maintainer='Blanc Ltd',
+ maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
packages=find_packages(),
classifiers=[
|
e059af57acec9c077ddb348ac6dd84ff58d312fe
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='blanc-basic-pages',
version='0.2.1',
description='Blanc Basic Pages for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-pages',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'django-mptt>=0.6.0',
'django-mptt-admin==0.1.8',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
license='BSD',
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='blanc-basic-pages',
version='0.2.1',
description='Blanc Basic Pages for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-pages',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'django-mptt>=0.6.1',
'django-mptt-admin>=0.1.8',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
license='BSD',
)
|
Fix dependencies for Django 1.7
|
Fix dependencies for Django 1.7
Older versions of django-mptt will generate warnings
|
Python
|
bsd-3-clause
|
blancltd/blanc-basic-pages
|
---
+++
@@ -11,8 +11,8 @@
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
- 'django-mptt>=0.6.0',
- 'django-mptt-admin==0.1.8',
+ 'django-mptt>=0.6.1',
+ 'django-mptt-admin>=0.1.8',
],
packages=find_packages(),
classifiers=[
|
5865599eee7014a02e07413dea71a79e878ac6b1
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import ptwit
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version=ptwit.__version__,
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author=ptwit.__author__,
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license=ptwit.__license__,
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:main']},
zip_safe=False)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import ptwit
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version=ptwit.__version__,
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author=ptwit.__author__,
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license=ptwit.__license__,
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
|
Change the entry point to cmd
|
Change the entry point to cmd
|
Python
|
mit
|
ptpt/ptwit
|
---
+++
@@ -36,5 +36,5 @@
py_modules=['ptwit'],
install_requires=requires,
entry_points={
- 'console_scripts': ['ptwit=ptwit:main']},
+ 'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
|
669280351b04d61df1de5ff03c4c7a258b37ad32
|
sell/views.py
|
sell/views.py
|
from decimal import Decimal
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from books.models import BookType, Book
from common.bookchooserwizard import BookChooserWizard
class SellWizard(BookChooserWizard):
@property
def page_title(self):
return _("Sell books")
@property
def url_namespace(self):
return "sell"
@property
def session_var_name(self):
return "sell_chosen_books"
@property
def feature_add_new(self):
return True
def process_books_summary(self, session, user, book_list):
for book in book_list:
amount = book['amount']
del book['amount']
user.save()
dbbook = Book(owner=user, accepted=False, sold=False)
if 'pk' in book:
dbbook.book_type_id = book['pk']
else:
book['price'] = Decimal(book['price'])
if book['publication_year'] == "":
book['publication_year'] = 1970
book_type = BookType(**book)
book_type.save()
dbbook.book_type = book_type
for i in range(0, amount):
dbbook.pk = None
dbbook.save()
return True, None
def success(self, request):
return render(request, 'sell/success.html')
|
from decimal import Decimal
import re
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from books.models import BookType, Book
from common.bookchooserwizard import BookChooserWizard
class SellWizard(BookChooserWizard):
@property
def page_title(self):
return _("Sell books")
@property
def url_namespace(self):
return "sell"
@property
def session_var_name(self):
return "sell_chosen_books"
@property
def feature_add_new(self):
return True
def process_books_summary(self, session, user, book_list):
for book in book_list:
amount = book['amount']
del book['amount']
user.save()
dbbook = Book(owner=user, accepted=False, sold=False)
if 'pk' in book:
dbbook.book_type_id = book['pk']
else:
book['isbn'] = re.sub(r'[^\d.]+', '', book['isbn'])
book['price'] = Decimal(book['price'])
if book['publication_year'] == "":
book['publication_year'] = 1970
book_type = BookType(**book)
book_type.save()
dbbook.book_type = book_type
for i in range(0, amount):
dbbook.pk = None
dbbook.save()
return True, None
def success(self, request):
return render(request, 'sell/success.html')
|
Delete non-digit characters in ISBN in server side
|
Delete non-digit characters in ISBN in server side
|
Python
|
agpl-3.0
|
m4tx/egielda,m4tx/egielda,m4tx/egielda
|
---
+++
@@ -1,4 +1,5 @@
from decimal import Decimal
+import re
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
@@ -34,6 +35,7 @@
if 'pk' in book:
dbbook.book_type_id = book['pk']
else:
+ book['isbn'] = re.sub(r'[^\d.]+', '', book['isbn'])
book['price'] = Decimal(book['price'])
if book['publication_year'] == "":
book['publication_year'] = 1970
|
eb5dab3b3231688966254a1797ced7eec67b6e8a
|
setup.py
|
setup.py
|
import multiprocessing
from setuptools import setup, find_packages
setup(
name='sow-generator',
version='0.1',
description='Create a scope of work from templates and version controlled documentation.',
long_description = open('README.rst', 'r').read() + open('CHANGELOG.rst', 'r').read() + open('AUTHORS.rst', 'r').read(),
author='Hedley Roos',
author_email='hedley@praekelt.com',
license='BSD',
url='http://github.com/praekelt/sow-generator',
packages = find_packages(),
install_requires = [
'Django<1.7',
'South',
'celery',
'django-celery',
'raven',
'PyYAML',
'requests',
'github3.py',
'pyandoc',
'django-object-tools',
'django-adminplus'
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.4',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
import multiprocessing
from setuptools import setup, find_packages
setup(
name='sow-generator',
version='0.1.1',
description='Create a scope of work from templates and version controlled documentation.',
long_description = open('README.rst', 'r').read() + open('CHANGELOG.rst', 'r').read() + open('AUTHORS.rst', 'r').read(),
author='Hedley Roos',
author_email='hedley@praekelt.com',
license='BSD',
url='http://github.com/praekelt/sow-generator',
packages = find_packages(),
install_requires = [
'Django<1.7',
'South',
'celery',
'django-celery',
'raven',
'PyYAML',
'requests',
'github3.py',
'pyandoc',
'django-object-tools',
'django-adminplus'
],
include_package_data=True,
tests_require=[
'django-setuptest>=0.1.4',
],
test_suite="setuptest.setuptest.SetupTestSuite",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
Bump version to see if pip installs it
|
Bump version to see if pip installs it
|
Python
|
bsd-3-clause
|
praekelt/sow-generator
|
---
+++
@@ -3,7 +3,7 @@
setup(
name='sow-generator',
- version='0.1',
+ version='0.1.1',
description='Create a scope of work from templates and version controlled documentation.',
long_description = open('README.rst', 'r').read() + open('CHANGELOG.rst', 'r').read() + open('AUTHORS.rst', 'r').read(),
author='Hedley Roos',
|
79f9a028215f6765898ae43faf2ba7e4aaf98f0c
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/django-pgallery',
download_url='http://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.4',
'Pillow',
'psycopg2>=2.4',
'django-markitup>=1.0',
'django-model-utils>=1.1',
'djorm-ext-core>=0.4.2',
'djorm-ext-expressions>=0.4.4',
'djorm-ext-hstore>=0.4.2',
'djorm-ext-pgarray',
'sorl-thumbnail>=11',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-pgallery',
version=__import__('pgallery').__version__,
description='Photo gallery app for PostgreSQL and Django.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='zbigniew@siciarz.net',
url='http://github.com/zsiciarz/django-pgallery',
download_url='http://pypi.python.org/pypi/django-pgallery',
license='MIT',
install_requires=[
'Django>=1.4',
'Pillow',
'psycopg2>=2.4',
'django-markitup>=1.0',
'django-model-utils>=1.1',
'djorm-ext-core>=0.4.2',
'djorm-ext-expressions>=0.4.4',
'djorm-ext-hstore>=0.4.2',
'djorm-ext-pgarray',
'sorl-thumbnail>=11',
],
packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
)
|
Exclude tests package from installing.
|
Exclude tests package from installing.
|
Python
|
mit
|
zsiciarz/django-pgallery,zsiciarz/django-pgallery
|
---
+++
@@ -26,7 +26,7 @@
'djorm-ext-pgarray',
'sorl-thumbnail>=11',
],
- packages=find_packages(),
+ packages=find_packages(exclude=['tests']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
|
eeecf68d2d59bc2233478b01748cbf88bab85722
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
execfile('facebook/version.py')
setup(
name = 'Facebook',
version = __version__,
description = 'Facebook makes it even easier to interact with Facebook\'s Graph API',
long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
author = 'Johannes Gorset',
author_email = 'jgorset@gmail.com',
url = 'http://github.com/jgorset/facebook',
packages = [
'facebook'
]
)
|
#!/usr/bin/env python
from distutils.core import setup
execfile('facebook/version.py')
setup(
name='Facebook',
version=__version__,
description='Facebook makes it even easier to interact "+\
"with Facebook\'s Graph API',
long_description=open('README.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Johannes Gorset',
author_email='jgorset@gmail.com',
url='http://github.com/jgorset/facebook',
requires=['facepy'],
packages=[
'facebook'
]
)
|
Add missing requires and PEP8ize.
|
Add missing requires and PEP8ize.
|
Python
|
mit
|
jgorset/facebook,vyyvyyv/facebook,jgorset/facebook,vyyvyyv/facebook
|
---
+++
@@ -5,14 +5,17 @@
execfile('facebook/version.py')
setup(
- name = 'Facebook',
- version = __version__,
- description = 'Facebook makes it even easier to interact with Facebook\'s Graph API',
- long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
- author = 'Johannes Gorset',
- author_email = 'jgorset@gmail.com',
- url = 'http://github.com/jgorset/facebook',
- packages = [
+ name='Facebook',
+ version=__version__,
+ description='Facebook makes it even easier to interact "+\
+ "with Facebook\'s Graph API',
+ long_description=open('README.rst').read() + '\n\n' +
+ open('HISTORY.rst').read(),
+ author='Johannes Gorset',
+ author_email='jgorset@gmail.com',
+ url='http://github.com/jgorset/facebook',
+ requires=['facepy'],
+ packages=[
'facebook'
]
)
|
932182858efcdf3d76d3a19cba09967c680e907f
|
setup.py
|
setup.py
|
# -*- coding: utf-8; mode: python; -*-
"""
A package that implements offline messages for Django
Web Framework.
(C) 2011 oDesk www.oDesk.com w/revisions by Zapier.com
"""
from setuptools import setup
setup(
name='django-offline-messages',
version='0.3.1',
description='A package that implements offline messages for Django plus more',
long_description='A package that implements offline messages for Django Web Framework',
license='BSD',
keywords='django offline messages',
url='https://github.com/zapier/django-offline-messages',
author='oDesk, www.odesk.com',
author_email='developers@odesk.com',
maintainer='Bryan Helmig',
maintainer_email='bryan@zapier.com',
packages=['offline_messages', 'offline_messages.migrations'],
classifiers=['Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
test_suite='tests.runtests.runtests'
)
|
# -*- coding: utf-8; mode: python; -*-
"""
A package that implements offline messages for Django
Web Framework.
(C) 2011 oDesk www.oDesk.com w/revisions by Zapier.com
"""
from setuptools import setup

# Packaging metadata for django-offline-messages; everything is declared
# inline, so no files need to be read at build time.
setup(
    name='django-offline-messages',
    version='0.3.1',
    description='A package that implements offline messages for Django plus more',
    long_description='A package that implements offline messages for Django Web Framework',
    license='BSD',
    keywords='django offline messages',
    url='https://github.com/zapier/django-offline-messages',
    author='oDesk, www.odesk.com',
    author_email='developers@odesk.com',
    maintainer='Bryan Helmig',
    maintainer_email='bryan@zapier.com',
    # Subpackages must be listed explicitly; plain `packages` lists are
    # not recursive.
    packages=['offline_messages', 'offline_messages.migrations'],
    classifiers=['Development Status :: 3 - Alpha',
                 'Environment :: Web Environment',
                 'Framework :: Django',
                 'Intended Audience :: Developers',
                 'License :: OSI Approved :: BSD License',
                 'Operating System :: OS Independent',
                 'Programming Language :: Python',
                 'Topic :: Software Development :: Libraries :: Python Modules',
                 ],
    # Entry point used by `python setup.py test`.
    test_suite='tests.runtests.runtests',
    # Third-party runtime dependency — presumably backs message storage;
    # confirm against the models before relying on this note.
    install_requires=['django-jsonfield']
)
|
Add django-jsonfield as a requirement
|
Add django-jsonfield as a requirement
|
Python
|
bsd-3-clause
|
dym/django-offline-messages
|
---
+++
@@ -30,5 +30,6 @@
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
- test_suite='tests.runtests.runtests'
- )
+ test_suite='tests.runtests.runtests',
+ install_requires=['django-jsonfield']
+)
|
e10d5b8a975f1d9482f845c1476edcc4598bb84a
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import find_packages, setup
from pyglins import __version__, __description__


def read_readme():
    """Return the contents of README.md, resolved relative to this file."""
    with open(os.path.join(os.path.dirname(__file__), 'README.md')) as file:
        return file.read()


setup(name='pyglins',
      # Version and description live in the package so they are defined
      # in exactly one place.
      version=__version__,
      description=__description__,
      long_description=read_readme(),
      classifiers=[
          'Development Status :: 1 - Planning',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 3.4',
          'Programming Language :: Python :: 3.5',
          'Topic :: Software Development :: Libraries :: Python Modules',
          'Topic :: Utilities',
          'Operating System :: OS Independent'],
      keywords='plugin manager',
      author='Javier Caballero',
      author_email='paxet83@gmail.com',
      url='https://github.com/paxet/pyglins',
      license='MIT',
      packages=find_packages(exclude=['tests']),
      )
|
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
from pyglins import __version__, __description__


def read_readme():
    """Return the long description text.

    Prefers a reStructuredText conversion of README.md via pypandoc;
    falls back to the raw Markdown when pypandoc or the file is
    unavailable (IOError covers missing file/pandoc binary).
    """
    try:
        import pypandoc
        description = pypandoc.convert('README.md', 'rst')
    except (IOError, ImportError):
        with open('README.md') as file:
            description = file.read()
    return description


setup(name='pyglins',
      # Version and description live in the package so they are defined
      # in exactly one place.
      version=__version__,
      description=__description__,
      long_description=read_readme(),
      classifiers=[
          'Development Status :: 1 - Planning',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 3.4',
          'Programming Language :: Python :: 3.5',
          'Topic :: Software Development :: Libraries :: Python Modules',
          'Topic :: Utilities',
          'Operating System :: OS Independent'],
      keywords='plugin manager',
      author='Javier Caballero',
      author_email='paxet83@gmail.com',
      url='https://github.com/paxet/pyglins',
      license='MIT',
      packages=find_packages(exclude=['tests']),
      )
|
Convert README.md to reStructuredText with pypandoc
|
Convert README.md to reStructuredText with pypandoc
|
Python
|
mit
|
paxet/pyglins
|
---
+++
@@ -1,12 +1,17 @@
# -*- coding: utf-8 -*-
-import os
+
from setuptools import find_packages, setup
from pyglins import __version__, __description__
def read_readme():
- with open(os.path.join(os.path.dirname(__file__), 'README.md')) as file:
- return file.read()
+ try:
+ import pypandoc
+ description = pypandoc.convert('README.md', 'rst')
+ except (IOError, ImportError):
+ with open('README.md') as file:
+ description = file.read()
+ return description
setup(name='pyglins',
|
1b4ca927245e424a340f5caa2ed9bda615dd8a5c
|
setup.py
|
setup.py
|
"""setup.py
..codeauthor:: John Lane <jlane@fanthreesixty.com>
"""
from setuptools import setup, find_packages
from sda import __author__, __email__, __license__, __version__
setup(
name='sda',
version=__version__,
packages=find_packages(),
scripts=[],
description='A wrapper for Selenium. This library uses custom data attributes to accelerate '
'testing through the Selenium framework',
author=__author__,
author_email=__email__,
url='https://github.com/jlane9/selenium-data-attributes',
download_url='https://github.com/jlane9/selenium-data-attributes/tarball/{}'.format(__version__),
keywords='testing selenium qa web automation',
install_requires=['lxml', 'cssselect'],
license=__license__,
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing'])
|
"""setup.py
..codeauthor:: John Lane <jlane@fanthreesixty.com>
"""
from setuptools import setup, find_packages
from sda import __author__, __email__, __license__, __version__
setup(
name='sda',
version=__version__,
packages=find_packages(),
scripts=[],
description='A wrapper for Selenium. This library uses custom data attributes to accelerate '
'testing through the Selenium framework',
author=__author__,
author_email=__email__,
url='https://github.com/jlane9/selenium-data-attributes',
download_url='https://github.com/jlane9/selenium-data-attributes/tarball/{}'.format(__version__),
keywords='testing selenium qa web automation',
install_requires=['lxml', 'cssselect'],
license=__license__,
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing'])
|
Upgrade project to production ready
|
Upgrade project to production ready
|
Python
|
mit
|
jlane9/selenium_data_attributes,jlane9/selenium_data_attributes
|
---
+++
@@ -22,7 +22,7 @@
keywords='testing selenium qa web automation',
install_requires=['lxml', 'cssselect'],
license=__license__,
- classifiers=['Development Status :: 4 - Beta',
+ classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
|
9a424163fa97bbb5ab7b19ecb9707fa05cb1bef7
|
setup.py
|
setup.py
|
#!/usr/bin/env python

from setuptools import setup

# Version info -- read without importing: exec'ing _version.py avoids
# importing `releases`, which would require its dependencies to already
# be installed at build time.
_locals = {}
with open('releases/_version.py') as fp:
    exec(fp.read(), None, _locals)
version = _locals['__version__']

setup(
    name='releases',
    version=version,
    description='A Sphinx extension for changelog manipulation',
    long_description=open("README.rst").read(),
    author='Jeff Forcier',
    author_email='jeff@bitprophet.org',
    url='https://github.com/bitprophet/releases',
    packages=['releases'],
    install_requires=[
        # Upper bounds pin out releases with breaking changes.
        'semantic_version<2.7',
        'sphinx>=1.3,<1.8',
    ],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Unix',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Software Development',
        'Topic :: Software Development :: Documentation',
        'Topic :: Documentation',
        'Topic :: Documentation :: Sphinx',
    ],
)
|
#!/usr/bin/env python

from setuptools import setup

# Version info -- read without importing: exec'ing _version.py avoids
# importing `releases`, which would require its dependencies to already
# be installed at build time.
_locals = {}
with open('releases/_version.py') as fp:
    exec(fp.read(), None, _locals)
version = _locals['__version__']

setup(
    name='releases',
    version=version,
    description='A Sphinx extension for changelog manipulation',
    long_description=open("README.rst").read(),
    author='Jeff Forcier',
    author_email='jeff@bitprophet.org',
    url='https://github.com/bitprophet/releases',
    packages=['releases'],
    install_requires=[
        # semantic_version keeps its upper bound; Sphinx no longer does.
        'semantic_version<2.7',
        'sphinx>=1.3',
    ],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Unix',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Software Development',
        'Topic :: Software Development :: Documentation',
        'Topic :: Documentation',
        'Topic :: Documentation :: Sphinx',
    ],
)
|
Remove upper limit for Sphinx version
|
Remove upper limit for Sphinx version
|
Python
|
bsd-2-clause
|
bitprophet/releases
|
---
+++
@@ -19,7 +19,7 @@
packages=['releases'],
install_requires=[
'semantic_version<2.7',
- 'sphinx>=1.3,<1.8',
+ 'sphinx>=1.3',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
|
b389a7c0a3129573d28308cd175002b612b016f6
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
"""Setup for the DeepOBS package"""
import setuptools


def readme():
    """Return the contents of README.md for use as the long description."""
    with open('README.md') as f:
        return f.read()


setuptools.setup(
    name='deepobs',
    version='1.1.1',
    description='Deep Learning Optimizer Benchmark Suite',
    long_description=readme(),
    author='Frank Schneider, Lukas Balles and Philipp Hennig,',
    author_email='frank.schneider@tue.mpg.de',
    license='MIT',
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3.6",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    install_requires=[
        # NOTE(review): 'argparse' is in the standard library on all
        # supported Pythons; listing it here is redundant.
        'argparse', 'numpy', 'pandas', 'matplotlib', 'matplotlib2tikz',
        'seaborn'
    ],
    # Helper scripts installed onto the user's PATH.
    scripts=[
        'deepobs/scripts/deepobs_prepare_data.sh',
        'deepobs/scripts/deepobs_get_baselines.sh',
        'deepobs/scripts/deepobs_plot_results.py',
        'deepobs/scripts/deepobs_estimate_runtime.py'
    ],
    zip_safe=False)
|
# -*- coding: utf-8 -*-
"""Setup for the DeepOBS package"""
import setuptools


def readme():
    """Return the contents of README.md for use as the long description."""
    with open("README.md") as f:
        return f.read()


setuptools.setup(
    name="deepobs",
    version="1.1.2",
    description="Deep Learning Optimizer Benchmark Suite",
    long_description=readme(),
    author="Frank Schneider, Lukas Balles and Philipp Hennig,",
    author_email="frank.schneider@tue.mpg.de",
    license="MIT",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3.6",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    install_requires=[
        # NOTE(review): 'argparse' is in the standard library on all
        # supported Pythons; listing it here is redundant.
        "argparse",
        "numpy",
        "pandas",
        "matplotlib",
        # Pinned to an exact release.
        "matplotlib2tikz==0.6.18",
        "seaborn",
    ],
    # Helper scripts installed onto the user's PATH.
    scripts=[
        "deepobs/scripts/deepobs_prepare_data.sh",
        "deepobs/scripts/deepobs_get_baselines.sh",
        "deepobs/scripts/deepobs_plot_results.py",
        "deepobs/scripts/deepobs_estimate_runtime.py",
    ],
    zip_safe=False,
)
|
Install specific version of matplotlib2tikz
|
Install specific version of matplotlib2tikz
|
Python
|
mit
|
fsschneider/DeepOBS,fsschneider/DeepOBS
|
---
+++
@@ -5,17 +5,18 @@
def readme():
- with open('README.md') as f:
+ with open("README.md") as f:
return f.read()
+
setuptools.setup(
- name='deepobs',
- version='1.1.1',
- description='Deep Learning Optimizer Benchmark Suite',
+ name="deepobs",
+ version="1.1.2",
+ description="Deep Learning Optimizer Benchmark Suite",
long_description=readme(),
- author='Frank Schneider, Lukas Balles and Philipp Hennig,',
- author_email='frank.schneider@tue.mpg.de',
- license='MIT',
+ author="Frank Schneider, Lukas Balles and Philipp Hennig,",
+ author_email="frank.schneider@tue.mpg.de",
+ license="MIT",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3.6",
@@ -26,13 +27,18 @@
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
install_requires=[
- 'argparse', 'numpy', 'pandas', 'matplotlib', 'matplotlib2tikz',
- 'seaborn'
+ "argparse",
+ "numpy",
+ "pandas",
+ "matplotlib",
+ "matplotlib2tikz==0.6.18",
+ "seaborn",
],
scripts=[
- 'deepobs/scripts/deepobs_prepare_data.sh',
- 'deepobs/scripts/deepobs_get_baselines.sh',
- 'deepobs/scripts/deepobs_plot_results.py',
- 'deepobs/scripts/deepobs_estimate_runtime.py'
+ "deepobs/scripts/deepobs_prepare_data.sh",
+ "deepobs/scripts/deepobs_get_baselines.sh",
+ "deepobs/scripts/deepobs_plot_results.py",
+ "deepobs/scripts/deepobs_estimate_runtime.py",
],
- zip_safe=False)
+ zip_safe=False,
+)
|
51d0498f1c444f00ce982a93d8c9fdfb72a196b4
|
setup.py
|
setup.py
|
#! /usr/bin/env python
'''
This file is part of targetcli.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''

from distutils.core import setup

# Read __version__ from the package without importing it.
# Fix: execfile() was removed in Python 3; exec(open(...).read()) is
# equivalent and works on both Python 2 and Python 3.
__version__ = ''
exec(open('targetcli/version.py').read())

setup(
    name = 'targetcli-fb',
    version = __version__,
    description = 'An administration shell for RTS storage targets.',
    license = 'Apache 2.0',
    maintainer = 'Andy Grover',
    maintainer_email = 'agrover@redhat.com',
    url = 'http://github.com/agrover/targetcli-fb',
    packages = ['targetcli'],
    scripts = ['scripts/targetcli'],
)
|
#! /usr/bin/env python
'''
This file is part of targetcli.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''

from distutils.core import setup

# Read __version__ from the package without importing it; exec(open(...))
# works on both Python 2 and Python 3, unlike the removed execfile().
__version__ = ''
exec(open('targetcli/version.py').read())

setup(
    name = 'targetcli-fb',
    version = __version__,
    description = 'An administration shell for RTS storage targets.',
    license = 'Apache 2.0',
    maintainer = 'Andy Grover',
    maintainer_email = 'agrover@redhat.com',
    url = 'http://github.com/agrover/targetcli-fb',
    packages = ['targetcli'],
    scripts = ['scripts/targetcli'],
)
|
Replace execfile() with exec() since it does not work with Python 3
|
Replace execfile() with exec() since it does not work with Python 3
Signed-off-by: Christophe Vu-Brugier <1930e27f67e1e10d51770b88cb06d386f1aa46ae@yahoo.fr>
|
Python
|
apache-2.0
|
agrover/targetcli-fb,cloud4life/targetcli-fb,cvubrugier/targetcli-fb
|
---
+++
@@ -19,7 +19,7 @@
from distutils.core import setup
__version__ = ''
-execfile('targetcli/version.py')
+exec(open('targetcli/version.py').read())
setup(
name = 'targetcli-fb',
|
99b79326fa18f46fe449e11fd0bfa17814d7a148
|
setup.py
|
setup.py
|
from distutils.core import setup

# Packaging script for the resync library.
setup(
    name='resync',
    version='0.6.1',
    packages=['resync'],
    classifiers=["Development Status :: 3 - Alpha",
                 "Programming Language :: Python",
                 "Topic :: Software Development :: Libraries :: Python Modules",
                 "Environment :: Web Environment"],
    author='Simeon Warner',
    author_email='simeon.warner@cornell.edu',
    # NOTE(review): the handle from open() is never closed explicitly;
    # harmless in a short-lived setup script, but `with` would be tidier.
    long_description=open('README.md').read(),
    url='http://github.com/resync/resync',
)
|
from distutils.core import setup

# Packaging script for the resync library.
setup(
    name='resync',
    version='0.6.1',
    packages=['resync'],
    # Installs the command-line client onto the user's PATH.
    scripts=['bin/resync'],
    classifiers=["Development Status :: 3 - Alpha",
                 "Programming Language :: Python",
                 "Topic :: Software Development :: Libraries :: Python Modules",
                 "Environment :: Web Environment"],
    author='Simeon Warner',
    author_email='simeon.warner@cornell.edu',
    # NOTE(review): the handle from open() is never closed explicitly;
    # harmless in a short-lived setup script, but `with` would be tidier.
    long_description=open('README.md').read(),
    url='http://github.com/resync/resync',
)
|
Add config to install resync script
|
Add config to install resync script
|
Python
|
apache-2.0
|
resync/resync,dans-er/resync,lindareijnhoudt/resync,lindareijnhoudt/resync,dans-er/resync
|
---
+++
@@ -4,6 +4,7 @@
name='resync',
version='0.6.1',
packages=['resync'],
+ scripts=['bin/resync'],
classifiers=["Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
|
1594644990fe6c621d309b587d844669ec273dec
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
import sys

from setuptools import setup, find_packages
from pkg_resources import resource_filename

# depending on your execution context the version file
# may be located in a different place!
vsn_path = resource_filename(__name__, 'hvac/version')
if not os.path.exists(vsn_path):
    # Fall back to a version file next to setup.py (e.g. inside an sdist).
    vsn_path = resource_filename(__name__, 'version')
    if not os.path.exists(vsn_path):
        print("%s is missing" % vsn_path)
        sys.exit(1)

setup(
    name='hvac',
    # NOTE(review): the handle from open() is never closed explicitly.
    version=open(vsn_path, 'r').read(),
    description='HashiCorp Vault API client',
    author='Ian Unruh',
    author_email='ianunruh@gmail.com',
    url='https://github.com/ianunruh/hvac',
    keywords=['hashicorp', 'vault'],
    classifiers=['License :: OSI Approved :: Apache Software License'],
    packages=find_packages(),
    install_requires=[
        'requests>=2.7.0',
    ],
    include_package_data=True,
    # Ship the version file inside the installed package.
    package_data={'hvac': ['version']},
    extras_require={
        # Optional HCL parsing support: pip install hvac[parser]
        'parser': ['pyhcl>=0.2.1,<0.3.0']
    }
)
|
#!/usr/bin/env python
import os
import sys

from setuptools import setup, find_packages
from pkg_resources import resource_filename

with open("README.md", "r") as fh:
    long_description = fh.read()

# depending on your execution context the version file
# may be located in a different place!
vsn_path = resource_filename(__name__, 'hvac/version')
if not os.path.exists(vsn_path):
    # Fall back to a version file next to setup.py (e.g. inside an sdist).
    vsn_path = resource_filename(__name__, 'version')
    if not os.path.exists(vsn_path):
        print("%s is missing" % vsn_path)
        sys.exit(1)

setup(
    name='hvac',
    # NOTE(review): the handle from open() is never closed explicitly.
    version=open(vsn_path, 'r').read(),
    description='HashiCorp Vault API client',
    long_description=long_description,
    # Tell PyPI the long description is Markdown, not reST.
    long_description_content_type="text/markdown",
    author='Ian Unruh',
    author_email='ianunruh@gmail.com',
    url='https://github.com/ianunruh/hvac',
    keywords=['hashicorp', 'vault'],
    classifiers=['License :: OSI Approved :: Apache Software License'],
    packages=find_packages(),
    install_requires=[
        'requests>=2.7.0',
    ],
    include_package_data=True,
    # Ship the version file inside the installed package.
    package_data={'hvac': ['version']},
    extras_require={
        # Optional HCL parsing support: pip install hvac[parser]
        'parser': ['pyhcl>=0.2.1,<0.3.0']
    }
)
|
Include README.md in package metadata
|
Include README.md in package metadata
|
Python
|
apache-2.0
|
ianunruh/hvac,ianunruh/hvac
|
---
+++
@@ -3,6 +3,9 @@
import sys
from setuptools import setup, find_packages
from pkg_resources import resource_filename
+
+with open("README.md", "r") as fh:
+ long_description = fh.read()
# depending on your execution context the version file
# may be located in a different place!
@@ -17,6 +20,8 @@
name='hvac',
version=open(vsn_path, 'r').read(),
description='HashiCorp Vault API client',
+ long_description=long_description,
+ long_description_content_type="text/markdown",
author='Ian Unruh',
author_email='ianunruh@gmail.com',
url='https://github.com/ianunruh/hvac',
|
769cf7b47fde883e4b44cec3adf8944bd62f7363
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from setuptools import setup

# Importing the module gives access to __version__; safe here because
# inflection is a dependency-free single module.
import inflection

setup(
    name='inflection',
    version=inflection.__version__,
    description="A port of Ruby on Rails inflector to Python",
    long_description=open('README.rst').read(),
    author='Janne Vanhala',
    author_email='janne.vanhala@gmail.com',
    url='http://github.com/jpvanhal/inflection',
    license='MIT',
    # Distributed as a single module, not a package.
    py_modules=['inflection'],
    zip_safe=False,
    python_requires='>=3.5',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
    ],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from setuptools import setup

# Importing the module gives access to __version__; safe here because
# inflection is a dependency-free single module.
import inflection

setup(
    name='inflection',
    version=inflection.__version__,
    description="A port of Ruby on Rails inflector to Python",
    long_description=open('README.rst').read(),
    author='Janne Vanhala',
    author_email='janne.vanhala@gmail.com',
    url='https://github.com/jpvanhal/inflection',
    license='MIT',
    # Distributed as a single module, not a package.
    py_modules=['inflection'],
    zip_safe=False,
    python_requires='>=3.5',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
    ],
)
|
Use HTTPS for the project url
|
Use HTTPS for the project url
|
Python
|
mit
|
jpvanhal/inflection
|
---
+++
@@ -12,7 +12,7 @@
long_description=open('README.rst').read(),
author='Janne Vanhala',
author_email='janne.vanhala@gmail.com',
- url='http://github.com/jpvanhal/inflection',
+ url='https://github.com/jpvanhal/inflection',
license='MIT',
py_modules=['inflection'],
zip_safe=False,
|
d96027040f96de15c2bddf8ba5b94711b5af6e1f
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding=utf-8

__author__ = 'kulakov.ilya@gmail.com'

from setuptools import setup
from sys import platform

REQUIREMENTS = []

# pyobjc is required only on macOS (the 'darwin' platform).
if platform.startswith('darwin'):
    REQUIREMENTS.append('pyobjc >= 2.5')

setup(
    name="Power",
    version="1.1",
    description="Cross-platform system power status information.",
    author="Ilya Kulakov",
    author_email="kulakov.ilya@gmail.com",
    url="https://github.com/Kentzo/Power",
    platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
    packages=['power'],
    classifiers=[
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX :: Linux',
        'Topic :: System :: Power (UPS)',
        'Topic :: System :: Hardware',
        'Topic :: System :: Monitoring'
    ],
    install_requires=REQUIREMENTS
)
|
#!/usr/bin/env python
# coding=utf-8

__author__ = 'kulakov.ilya@gmail.com'

from setuptools import setup
from sys import platform

REQUIREMENTS = []

# The macOS pyobjc dependency is kept commented out because it is not
# available at PyPI yet:
# if platform.startswith('darwin'):
#     REQUIREMENTS.append('pyobjc >= 2.5')

setup(
    name="Power",
    version="1.1",
    description="Cross-platform system power status information.",
    author="Ilya Kulakov",
    author_email="kulakov.ilya@gmail.com",
    url="https://github.com/Kentzo/Power",
    platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
    packages=['power'],
    classifiers=[
        'Natural Language :: English',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX :: Linux',
        'Topic :: System :: Power (UPS)',
        'Topic :: System :: Hardware',
        'Topic :: System :: Monitoring'
    ],
    install_requires=REQUIREMENTS
)
|
Remove PyObjC requirement since it's not available at PyPI yet.
|
Remove PyObjC requirement since it's not available at PyPI yet.
|
Python
|
mit
|
Kentzo/Power
|
---
+++
@@ -9,8 +9,9 @@
REQUIREMENTS = []
-if platform.startswith('darwin'):
- REQUIREMENTS.append('pyobjc >= 2.5')
+# Not avaialable at PyPi yet
+# if platform.startswith('darwin'):
+ # REQUIREMENTS.append('pyobjc >= 2.5')
setup(
|
323498419bfe080c9807586c2e5cc2678b7d2fd4
|
setup.py
|
setup.py
|
from setuptools import setup
from os import path

# Resolve README.md relative to this file so the build works regardless
# of the current working directory.
# NOTE(review): the handle from open() is never closed explicitly.
readme = open(path.join(path.abspath(path.dirname(__file__)), 'README.md')).read()

setup(
    name='bottle-websocket',
    version='0.2',
    author='Zach Kelling',
    author_email='zeekayy@gmail.com',
    packages=['bottle_websocket',],
    description='WebSockets for bottle',
    long_description=readme,
    install_requires=['bottle', 'gevent-websocket'],
)
|
from setuptools import setup

# Fix: `long_description` must be a string; the original passed the file
# object returned by open('README.md') (and leaked the handle). Read the
# text up front inside a context manager instead.
with open('README.md') as readme:
    long_description = readme.read()

setup(
    name='bottle-websocket',
    version='0.2.5',
    author='Zach Kelling',
    author_email='zeekayy@gmail.com',
    packages=['bottle_websocket',],
    # Ship README.md with the distribution.
    package_data={'': ['README.md']},
    description='WebSockets for bottle',
    long_description=long_description,
    install_requires=['bottle', 'gevent-websocket'],
)
|
Use package_data, and bump version.
|
Use package_data, and bump version.
|
Python
|
mit
|
xujun10110/bottle-websocket,zeekay/bottle-websocket
|
---
+++
@@ -1,15 +1,13 @@
from setuptools import setup
-from os import path
-
-readme = open(path.join(path.abspath(path.dirname(__file__)), 'README.md')).read()
setup(
name='bottle-websocket',
- version='0.2',
+ version='0.2.5',
author='Zach Kelling',
author_email='zeekayy@gmail.com',
packages=['bottle_websocket',],
+ package_data={'': ['README.md']},
description='WebSockets for bottle',
- long_description=readme,
+ long_description=open('README.md'),
install_requires=['bottle', 'gevent-websocket'],
)
|
7b825d860c84eae8b5a74ef16ae8e1da08dde888
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(
    name='synapse',
    version='0.1.0',
    description='Synapse Distributed Key-Value Hypergraph Analysis Framework',
    author='Invisigoth Kenshoto',
    author_email='invisigoth.kenshoto@gmail.com',
    url='https://github.com/vertexproject/synapse',
    license='Apache License 2.0',
    packages=find_packages(exclude=['scripts',
                                    ]),
    include_package_data=True,
    # Most dependencies carry upper bounds to guard against breaking
    # releases; msgpack is pinned exactly.
    install_requires=[
        'tornado>=3.2.2,<5.0.0',
        'pyOpenSSL>=16.2.0,<18.0.0',
        'msgpack==0.5.1',
        'xxhash>=1.0.1,<2.0.0',
        'lmdb>=0.94,<1.0.0',
        'regex>=2017.9.23',
        'PyYAML>=3.12,<4.0',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: System :: Clustering',
        'Topic :: System :: Distributed Computing',
        'Topic :: System :: Software Distribution',
        'Programming Language :: Python :: 3.6',
    ],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(
    name='synapse',
    # NOTE(review): '0.1.0-alpha' is not a PEP 440-normalized pre-release
    # identifier (the canonical spelling is '0.1.0a0'); confirm that the
    # packaging toolchain accepts/normalizes it as intended.
    version='0.1.0-alpha',
    description='Synapse Distributed Key-Value Hypergraph Analysis Framework',
    author='Invisigoth Kenshoto',
    author_email='invisigoth.kenshoto@gmail.com',
    url='https://github.com/vertexproject/synapse',
    license='Apache License 2.0',
    packages=find_packages(exclude=['scripts',
                                    ]),
    include_package_data=True,
    # Most dependencies carry upper bounds to guard against breaking
    # releases; msgpack is pinned exactly.
    install_requires=[
        'tornado>=3.2.2,<5.0.0',
        'pyOpenSSL>=16.2.0,<18.0.0',
        'msgpack==0.5.1',
        'xxhash>=1.0.1,<2.0.0',
        'lmdb>=0.94,<1.0.0',
        'regex>=2017.9.23',
        'PyYAML>=3.12,<4.0',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: System :: Clustering',
        'Topic :: System :: Distributed Computing',
        'Topic :: System :: Software Distribution',
        'Programming Language :: Python :: 3.6',
    ],
)
|
Add -alpha prerelease version to version.
|
Add -alpha prerelease version to version.
|
Python
|
apache-2.0
|
vertexproject/synapse,vertexproject/synapse,vertexproject/synapse
|
---
+++
@@ -4,7 +4,7 @@
setup(
name='synapse',
- version='0.1.0',
+ version='0.1.0-alpha',
description='Synapse Distributed Key-Value Hypergraph Analysis Framework',
author='Invisigoth Kenshoto',
author_email='invisigoth.kenshoto@gmail.com',
|
8a97210aa5d83f6eac266a19fbad9a2159b14328
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
import sys

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Convenience release shortcut: `python setup.py publish` builds and
# uploads the distribution, then exits.
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist bdist_wininst upload -r pypi')
    sys.exit()

with open('README.rst') as f:
    readme = f.read()

# NOTE(review): `license` shadows the builtin of the same name; harmless
# in this short script but easy to trip over.
with open('LICENSE') as f:
    license = f.read()

setup(
    name='django-rest-surveys',
    version='0.1.0',
    description='A RESTful backend for giving surveys.',
    long_description=readme,
    author='Designlab',
    author_email='hello@trydesignlab.com',
    url='https://github.com/danxshap/django-rest-surveys',
    packages=['rest_surveys'],
    # Ship the LICENSE file with the distribution.
    package_data={'': ['LICENSE']},
    package_dir={'rest_surveys': 'rest_surveys'},
    install_requires=['Django', 'django-inline-ordering'],
    license=license,
)
|
#!/usr/bin/env python
import os
import sys

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Convenience release shortcut: `python setup.py publish` builds and
# uploads the distribution, then exits.
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist bdist_wininst upload -r pypi')
    sys.exit()

with open('README.rst') as f:
    readme = f.read()

# NOTE(review): `license` shadows the builtin of the same name; harmless
# in this short script but easy to trip over.
with open('LICENSE') as f:
    license = f.read()

setup(
    name='django-rest-surveys',
    version='0.1.0',
    description='A RESTful backend for giving surveys.',
    long_description=readme,
    author='Designlab',
    author_email='hello@trydesignlab.com',
    url='https://github.com/danxshap/django-rest-surveys',
    packages=['rest_surveys'],
    # Ship the LICENSE file with the distribution.
    package_data={'': ['LICENSE']},
    package_dir={'rest_surveys': 'rest_surveys'},
    install_requires=['Django>=1.7', 'djangorestframework>=3.0', 'django-inline-ordering'],
    license=license,
)
|
Set the required versions of required packages
|
Set the required versions of required packages
|
Python
|
mit
|
danxshap/django-rest-surveys
|
---
+++
@@ -28,6 +28,6 @@
packages=['rest_surveys'],
package_data={'': ['LICENSE']},
package_dir={'rest_surveys': 'rest_surveys'},
- install_requires=['Django', 'django-inline-ordering'],
+ install_requires=['Django>=1.7', 'djangorestframework>=3.0', 'django-inline-ordering'],
license=license,
)
|
eb339f661aec7c01a727a01fd8b9e2880d99cc9c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages

# Get version string: executes gdx2py/version.py (which is expected to
# define __version__, per the pylint pragma below) without importing the
# package itself.
with open('gdx2py/version.py') as f: exec(f.read())

setup(name='GDX2py',
      version=__version__,  # pylint: disable=undefined-variable
      author='Erkka Rinne',
      author_email='erkka.rinne@vtt.fi',
      description='Read and write GAMS Data eXchange (GDX) files using Python',
      python_requires='>=3.6',
      install_requires=[
          'gdxcc>=7',
      ],
      setup_requires=['pytest-runner'],
      tests_require=['pytest', 'pytest-datadir'],
      url='https://github.com/ererkka/GDX2py',
      packages=find_packages(exclude=['tests']),
      )
|
#!/usr/bin/env python
from setuptools import setup, find_packages

# Get version string: executes gdx2py/version.py (which is expected to
# define __version__, per the pylint pragma below) without importing the
# package itself.
with open('gdx2py/version.py') as f: exec(f.read())

setup(name='GDX2py',
      version=__version__,  # pylint: disable=undefined-variable
      author='Erkka Rinne',
      author_email='erkka.rinne@vtt.fi',
      description='Read and write GAMS Data eXchange (GDX) files using Python',
      python_requires='>=3.6',
      install_requires=[
          'gdxcc>=7',
      ],
      setup_requires=['pytest-runner'],
      tests_require=['pytest', 'pytest-datadir'],
      url='https://github.com/ererkka/GDX2py',
      packages=find_packages(exclude=['tests']),
      classifiers=[
          "Programming Language :: Python :: 3",
          "License :: OSI Approved :: MIT License",
          "Operating System :: OS Independent",
          "Topic :: Scientific/Engineering",
          "Intended Audience :: Developers",
          "Development Status :: 4 - Beta"
      ],
      )
|
Add classifiers to package info
|
Add classifiers to package info
|
Python
|
mit
|
ererkka/GDX2py
|
---
+++
@@ -18,4 +18,12 @@
tests_require=['pytest', 'pytest-datadir'],
url='https://github.com/ererkka/GDX2py',
packages=find_packages(exclude=['tests']),
+ classifiers=[
+ "Programming Language :: Python :: 3",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Topic :: Scientific/Engineering",
+ "Intended Audience :: Developers",
+ "Development Status :: 4 - Beta"
+ ],
)
|
170317364ae1ee97bad94fb9d53187ce674b5ebb
|
setup.py
|
setup.py
|
from os.path import isdir, isfile, join
from distutils import log
from setuptools import setup, find_packages
INSTALL_REQUIRES = [
'BTrees',
'zope.component',
'zodbpickle',
'ZODB',
'zope.index',
'zerodbext.catalog',
'cachetools',
'zc.zlibstorage',
'pycryptodome',
'flask-cors',
'flask',
'requests',
'jsonpickle',
'pyelliptic',
'ecdsa',
'zope.event',
'zope.lifecycleevent',
'ZEO',
'six'
]
# This is to avoid build errors on brand new Amazon Ubuntu instances
# which may not have libffi-dev installed.
#
# If we have neither cffi nor ffi.h we fall back to pycryptodome.
# Note that the warning is only visible if pip is run with -v.
def have_cffi():
try:
import cffi
except ImportError:
return False
else:
return True
def have_ffi_h():
include_dirs = ["/usr/include", "/usr/local/include"]
for dir in include_dirs:
if isdir(dir):
if isfile(join(dir, "ffi.h")) or isfile(join(dir, "ffi", "ffi.h")):
return True
return False
if have_cffi() or have_ffi_h():
INSTALL_REQUIRES.append("aes256gcm-nacl")
else:
log.warn("warning: *** ffi.h not found - aes256gcm-nacl optimization disabled ***")
INSTALL_REQUIRES.append("pycryptodome")
setup(
name="zerodb",
version="0.97.2.1",
description="End-to-end encrypted database",
author="ZeroDB Inc.",
author_email="michael@zerodb.io",
license="AGPLv3",
url="http://zerodb.io",
packages=find_packages(),
install_requires=INSTALL_REQUIRES,
)
|
from setuptools import setup, find_packages
INSTALL_REQUIRES = [
'BTrees',
'zope.component',
'zodbpickle',
'ZODB',
'zope.index',
'zerodbext.catalog',
'cachetools',
'zc.zlibstorage',
'pycryptodome',
'flask-cors',
'flask',
'requests',
'jsonpickle',
'pyelliptic',
'ecdsa',
'zope.event',
'zope.lifecycleevent',
'aes256gcm-nacl',
'ZEO',
'six'
]
setup(
name="zerodb",
version="0.97.2.1",
description="End-to-end encrypted database",
author="ZeroDB Inc.",
author_email="michael@zerodb.io",
license="AGPLv3",
url="http://zerodb.io",
packages=find_packages(),
install_requires=INSTALL_REQUIRES,
)
|
Revert "Fall back to pycryptodome if aes256gcm-nacl cannot be built."
|
Revert "Fall back to pycryptodome if aes256gcm-nacl cannot be built."
This reverts commit f457b06dd1dd9d3b1d3577b5a8415868ff0cb3d8.
|
Python
|
agpl-3.0
|
zerodb/zerodb,zero-db/zerodb,zerodb/zerodb,zero-db/zerodb
|
---
+++
@@ -1,6 +1,3 @@
-from os.path import isdir, isfile, join
-from distutils import log
-
from setuptools import setup, find_packages
INSTALL_REQUIRES = [
@@ -21,39 +18,10 @@
'ecdsa',
'zope.event',
'zope.lifecycleevent',
+ 'aes256gcm-nacl',
'ZEO',
'six'
]
-
-
-# This is to avoid build errors on brand new Amazon Ubuntu instances
-# which may not have libffi-dev installed.
-#
-# If we have neither cffi nor ffi.h we fall back to pycryptodome.
-# Note that the warning is only visible if pip is run with -v.
-
-def have_cffi():
- try:
- import cffi
- except ImportError:
- return False
- else:
- return True
-
-def have_ffi_h():
- include_dirs = ["/usr/include", "/usr/local/include"]
- for dir in include_dirs:
- if isdir(dir):
- if isfile(join(dir, "ffi.h")) or isfile(join(dir, "ffi", "ffi.h")):
- return True
- return False
-
-if have_cffi() or have_ffi_h():
- INSTALL_REQUIRES.append("aes256gcm-nacl")
-else:
- log.warn("warning: *** ffi.h not found - aes256gcm-nacl optimization disabled ***")
- INSTALL_REQUIRES.append("pycryptodome")
-
setup(
name="zerodb",
|
d9771c3c0f78b55de4ae00be4553975c2f934c12
|
setup.py
|
setup.py
|
import os
import re
import sys
import platform
import subprocess
import multiprocessing
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
from distutils.version import LooseVersion
cores = multiprocessing.cpu_count()*1.25
threads="-j" + str(int(cores))
class CMakeExtension(Extension):
def __init__(self, name, sourcedir=''):
Extension.__init__(self, name, sources=[])
self.sourcedir = os.path.abspath(sourcedir)
def readme():
with open("PYTHONREADME.md", "r") as fh:
return fh.read()
def operatingsystem():
if (platform.platform().find("Darwin") >= 0):
return "Operating System :: MacOS"
else:
return "Operating System :: POSIX :: Linux"
setup(
name='sharkbite',
version='1.2.0.0',
author='Marc Parisi',
author_email='phrocker@apache.org',
url='https://docs.sharkbite.io/',
description='Apache Accumulo and Apache HDFS Python Connector',
long_description=readme(),
long_description_content_type='text/markdown',
ext_modules=[CMakeExtension('sharkbite.pysharkbite')],
zip_safe=False,
classifiers=[
"Programming Language :: C++",
"License :: OSI Approved :: Apache Software License",
operatingsystem(),
],
python_requires='>=3.6',
packages=['sharkbite']
)
|
import os
import re
import sys
import platform
import subprocess
import multiprocessing
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
from distutils.version import LooseVersion
cores = multiprocessing.cpu_count()*1.25
threads="-j" + str(int(cores))
class CMakeExtension(Extension):
def __init__(self, name, sourcedir=''):
Extension.__init__(self, name, sources=[])
self.sourcedir = os.path.abspath(sourcedir)
def readme():
with open("PYTHONREADME.md", "r") as fh:
return fh.read()
def operatingsystem():
if (platform.platform().find("Darwin") >= 0):
return "Operating System :: MacOS"
else:
return "Operating System :: POSIX :: Linux"
setup(
name='sharkbite',
version='1.2.0.1',
author='Marc Parisi',
author_email='phrocker@apache.org',
url='https://docs.sharkbite.io/',
description='Apache Accumulo and Apache HDFS Python Connector',
long_description=readme(),
long_description_content_type='text/markdown',
ext_modules=[CMakeExtension('sharkbite.pysharkbite')],
zip_safe=False,
classifiers=[
"Programming Language :: C++",
"License :: OSI Approved :: Apache Software License",
operatingsystem(),
],
python_requires='>=3.6',
packages=['sharkbite']
)
|
Update error in 3.7 release
|
Update error in 3.7 release
|
Python
|
apache-2.0
|
phrocker/sharkbite,phrocker/sharkbite,phrocker/sharkbite,phrocker/sharkbite,phrocker/sharkbite
|
---
+++
@@ -32,7 +32,7 @@
setup(
name='sharkbite',
- version='1.2.0.0',
+ version='1.2.0.1',
author='Marc Parisi',
author_email='phrocker@apache.org',
url='https://docs.sharkbite.io/',
|
13ef6879aeca9881483bd9f575d66377f1dde0c1
|
tests/test_io.py
|
tests/test_io.py
|
import numpy as np
from tempfile import NamedTemporaryFile
from microscopium import io as mio
def test_imsave_tif_compress():
im = np.random.randint(0, 256, size=(1024, 1024, 3)).astype(np.uint8)
with NamedTemporaryFile(suffix='.tif') as fout:
fname = fout.name
fout.close()
mio.imsave(im, fname, compress=2)
imin = mio.imread(fname)
np.testing.assert_array_equal(im, imin)
|
import os
import numpy as np
from tempfile import NamedTemporaryFile
from microscopium import io as mio
from microscopium import pathutils as pth
def test_recursive_glob():
abspath = os.path.dirname(__file__)
tiffs0 = pth.all_matching_files(abspath, '*.tif')
assert len(tiffs0) == 8
assert tiffs0[0].startswith('/')
tiffs1 = pth.all_matching_files(abspath, '*.TIF')
assert len(tiffs1) == 0
tiffs2 = pth.all_matching_files(abspath, '*.TIF', case_sensitive=False,
full=False)
assert len(tiffs2) == 8
assert tiffs2[0].startswith('MYORES')
def test_imsave_tif_compress():
im = np.random.randint(0, 256, size=(1024, 1024, 3)).astype(np.uint8)
with NamedTemporaryFile(suffix='.tif') as fout:
fname = fout.name
fout.close()
mio.imsave(im, fname, compress=2)
imin = mio.imread(fname)
np.testing.assert_array_equal(im, imin)
|
Improve test coverage by testing recursive glob
|
Improve test coverage by testing recursive glob
|
Python
|
bsd-3-clause
|
jni/microscopium,microscopium/microscopium,Don86/microscopium,microscopium/microscopium,jni/microscopium,Don86/microscopium
|
---
+++
@@ -1,6 +1,22 @@
+import os
import numpy as np
from tempfile import NamedTemporaryFile
from microscopium import io as mio
+from microscopium import pathutils as pth
+
+
+def test_recursive_glob():
+ abspath = os.path.dirname(__file__)
+ tiffs0 = pth.all_matching_files(abspath, '*.tif')
+ assert len(tiffs0) == 8
+ assert tiffs0[0].startswith('/')
+ tiffs1 = pth.all_matching_files(abspath, '*.TIF')
+ assert len(tiffs1) == 0
+ tiffs2 = pth.all_matching_files(abspath, '*.TIF', case_sensitive=False,
+ full=False)
+ assert len(tiffs2) == 8
+ assert tiffs2[0].startswith('MYORES')
+
def test_imsave_tif_compress():
im = np.random.randint(0, 256, size=(1024, 1024, 3)).astype(np.uint8)
|
cddb15f25df404d849601e7c8db2df15d2443958
|
intellipaste.py
|
intellipaste.py
|
#!/usr/bin/env python3
import os
import time
import json
import requests
import pyperclip
API_KEY = os.environ.get('API_KEY')
# API_KEY = ""
def google_url_shorten(url):
req_url = "https://www.googleapis.com/urlshortener/v1/url?" + API_KEY
payload = {'longUrl': url}
headers = {'content-type': 'application/json'}
r = requests.post(req_url, data=json.dumps(payload), headers=headers)
resp = json.loads(r.text)
return resp["id"]
recent_value = ""
while True:
tmp_value = pyperclip.paste()
if (tmp_value != recent_value and not tmp_value.startswith("https://goo.gl") and not tmp_value.startswith("https://git")):
recent_value = tmp_value
url = str(recent_value)
if url.startswith("http://") or url.startswith("https://") or url.startswith("www."):
pyperclip.copy(google_url_shorten(url))
time.sleep(0.5)
|
#!/usr/bin/env python3
import os
import time
import json
import requests
import pyperclip
API_KEY = os.environ.get('API_KEY')
# API_KEY = ""
def google_url_shorten(url):
req_url = "https://www.googleapis.com/urlshortener/v1/url?" + str(API_KEY)
payload = {'longUrl': url}
headers = {'content-type': 'application/json'}
r = requests.post(req_url, data=json.dumps(payload), headers=headers)
resp = json.loads(r.text)
return resp["id"]
recent_value = ""
while True:
tmp_value = pyperclip.paste()
if (tmp_value != recent_value and not tmp_value.startswith("https://goo.gl") and not tmp_value.startswith("https://git")):
recent_value = tmp_value
url = str(recent_value)
if url.startswith("http://") or url.startswith("https://") or url.startswith("www."):
pyperclip.copy(google_url_shorten(url))
time.sleep(0.5)
|
Convert environment variable to string
|
Convert environment variable to string
|
Python
|
apache-2.0
|
LucasHMS/intelliPaste
|
---
+++
@@ -11,7 +11,7 @@
def google_url_shorten(url):
- req_url = "https://www.googleapis.com/urlshortener/v1/url?" + API_KEY
+ req_url = "https://www.googleapis.com/urlshortener/v1/url?" + str(API_KEY)
payload = {'longUrl': url}
headers = {'content-type': 'application/json'}
r = requests.post(req_url, data=json.dumps(payload), headers=headers)
|
11c1f5a9806f4a21abdb5ac7e4310ef3242389b0
|
server_dev.py
|
server_dev.py
|
from projects_controller import ProjectsController
from redirects_controller import RedirectsController
from flask import Flask, render_template, redirect, abort
DATA_DIR = 'data'
app = Flask(__name__)
app.url_map.strict_slashes = False
projects_controller = ProjectsController(DATA_DIR)
redirects_controller = RedirectsController(DATA_DIR)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@app.route('/')
def index():
projects = projects_controller.get_current_projects()
return render_template('index.html', projects=projects)
@app.route('/start')
def start_project():
return render_template('start_project.html')
@app.route('/<dynamic>')
def project(dynamic):
# First, test if if it's a project
projects = projects_controller.get_all_projects()
redirects = redirects_controller.get_redirects()
if dynamic in projects:
project_data = projects[dynamic]
if 'conclusion_post' in project_data:
# The project is over, we should redirect to the post
return redirect(project_data['conclusion_post'])
else:
return render_template('project.html', project_data=project_data)
# Next, check if it's a redirect
elif dynamic in redirects:
return redirect(redirects[dynamic])
else:
abort(404)
if __name__ == '__main__':
app.run(debug=True)
|
from projects_controller import ProjectsController
from redirects_controller import RedirectsController
from flask import Flask, render_template, redirect, abort
DATA_DIR = 'data'
app = Flask(__name__)
app.url_map.strict_slashes = False
projects_controller = ProjectsController(DATA_DIR)
redirects_controller = RedirectsController(DATA_DIR)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@app.route('/')
def index():
projects = projects_controller.get_current_projects()
return render_template('index.html', projects=projects)
@app.route('/start')
def start_project():
return render_template('start_project.html')
@app.route('/<dynamic>')
def project(dynamic):
projects = projects_controller.get_all_projects()
redirects = redirects_controller.get_redirects()
# First, test if if it's a project
if dynamic in projects:
project_data = projects[dynamic]
if 'conclusion_post' in project_data:
# The project is over, we should redirect to the post
return redirect(project_data['conclusion_post'])
else:
return render_template('project.html', project_data=project_data)
# Next, check if it's a redirect
elif dynamic in redirects:
return redirect(redirects[dynamic])
else:
abort(404)
if __name__ == '__main__':
app.run(debug=True)
|
Move comment to rational location
|
Move comment to rational location
|
Python
|
mit
|
teslaworksumn/teslaworks.net,teslaworksumn/teslaworks.net
|
---
+++
@@ -28,10 +28,10 @@
@app.route('/<dynamic>')
def project(dynamic):
- # First, test if if it's a project
projects = projects_controller.get_all_projects()
redirects = redirects_controller.get_redirects()
+ # First, test if if it's a project
if dynamic in projects:
project_data = projects[dynamic]
if 'conclusion_post' in project_data:
|
97edbee5813b8a87606b8fb3d09b4f116cdaf025
|
mordecai/tests/conftest.py
|
mordecai/tests/conftest.py
|
from ..geoparse import Geoparser
import pytest
import spacy
nlp = spacy.load('en_core_web_lg', disable=['parser', 'tagger'])
@pytest.fixture(scope='session', autouse=True)
def geo():
return Geoparser(nlp=nlp, threads=False, models_path = "/Users/ahalterman/MIT/Geolocation/mordecai_new/mordecai/mordecai/models/")
@pytest.fixture(scope='session', autouse=True)
def geo_thread():
return Geoparser(nlp=nlp, threads=True, models_path = "/Users/ahalterman/MIT/Geolocation/mordecai_new/mordecai/mordecai/models/")
|
from ..geoparse import Geoparser
import pytest
import spacy
nlp = spacy.load('en_core_web_lg', disable=['parser', 'tagger'])
@pytest.fixture(scope='session', autouse=True)
def geo():
return Geoparser(nlp=nlp, threads=False)
@pytest.fixture(scope='session', autouse=True)
def geo_thread():
return Geoparser(nlp=nlp, threads=True)
|
Remove hardcoded paths from testing
|
Remove hardcoded paths from testing
|
Python
|
mit
|
openeventdata/mordecai
|
---
+++
@@ -6,8 +6,8 @@
@pytest.fixture(scope='session', autouse=True)
def geo():
- return Geoparser(nlp=nlp, threads=False, models_path = "/Users/ahalterman/MIT/Geolocation/mordecai_new/mordecai/mordecai/models/")
+ return Geoparser(nlp=nlp, threads=False)
@pytest.fixture(scope='session', autouse=True)
def geo_thread():
- return Geoparser(nlp=nlp, threads=True, models_path = "/Users/ahalterman/MIT/Geolocation/mordecai_new/mordecai/mordecai/models/")
+ return Geoparser(nlp=nlp, threads=True)
|
37aa1c9f8faeefe7305cca526a7424a349939add
|
tests/smoke_test.py
|
tests/smoke_test.py
|
# -*- coding: utf-8 -*-
import unittest
import sys
sys.path.insert(0, '../mafia')
from game import Game
from game import Player
from testclient.testmessenger import TestMessenger
class SmokeTest(unittest.TestCase):
def setUp(self):
self.messenger = TestMessenger()
def test_smoke_test(self):
game = Game('t,c,c,m', self.messenger)
player_one = Player('one', 'one')
player_two = Player('two', 'two')
player_three = Player('three', 'three')
player_four = Player('four', 'four')
game.join(player_one)
game.join(player_two)
game.join(player_three)
game.join(player_four)
game.vote('one', 'three')
game.vote('three', 'one')
game.vote('two', 'three')
game.vote('four', 'three')
game.target('one', 'two')
game.target('two', 'one')
game.target('four', 'one')
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
import unittest
import sys
sys.path.insert(0, '../')
from mafia.game import Game
from mafia.game import Player
from testclient.testmessenger import TestMessenger
class SmokeTest(unittest.TestCase):
def setUp(self):
self.messenger = TestMessenger()
def test_smoke_test(self):
game = Game('t,c,c,m', self.messenger)
player_one = Player('one', 'one')
player_two = Player('two', 'two')
player_three = Player('three', 'three')
player_four = Player('four', 'four')
game.join(player_one)
game.join(player_two)
game.join(player_three)
game.join(player_four)
game.vote('one', 'three')
game.vote('three', 'one')
game.vote('two', 'three')
game.vote('four', 'three')
game.target('one', 'two')
game.target('two', 'one')
game.target('four', 'one')
if __name__ == '__main__':
unittest.main()
|
Change the smoke test imports to a relative import for consistency.
|
Change the smoke test imports to a relative import for consistency.
|
Python
|
mit
|
BitokuOokami/PloungeMafiaToolkit
|
---
+++
@@ -2,10 +2,10 @@
import unittest
import sys
-sys.path.insert(0, '../mafia')
+sys.path.insert(0, '../')
-from game import Game
-from game import Player
+from mafia.game import Game
+from mafia.game import Player
from testclient.testmessenger import TestMessenger
|
11d19d1756f6227db894aabcf4bd02e327e292c7
|
tests/test_basic.py
|
tests/test_basic.py
|
from hello_world import hello_world
from unittest import TestCase
class BasicTest(TestCase):
def test_basic_hello_world(self):
"""
Test basic hello world messaging
"""
False
|
from hello_world import hello_world
from unittest import TestCase
class BasicTest(TestCase):
def test_basic_hello_world(self):
"""
Test basic hello world messaging
"""
self.assertTrue(callable(hello_world))
|
Make things a little better
|
Make things a little better
|
Python
|
mit
|
jeansaad/hello_world
|
---
+++
@@ -8,4 +8,4 @@
"""
Test basic hello world messaging
"""
- False
+ self.assertTrue(callable(hello_world))
|
f23d90d441194d270b3bcf7997550f2f8e7e4c1d
|
sample-functions/BaseFunctions/python/handler.py
|
sample-functions/BaseFunctions/python/handler.py
|
import sys
def get_stdin():
buf = ""
for line in sys.stdin:
buf = buf + line
return buf
if(__name__ == "__main__"):
st = get_stdin()
print(st)
|
import sys
def get_stdin():
buf = ""
for line in sys.stdin:
buf = buf + line
return buf
if __name__ == "__main__":
st = get_stdin()
print(st)
|
Remove braces for Python sample
|
Remove braces for Python sample
Suggested by community member in an un-merged PR.
Signed-off-by: Alex Ellis (VMware) <5f7133baa0f5c7ca63ff11e11f2e2b0d2cf077c8@gmail.com>
|
Python
|
mit
|
openfaas/faas,rgee0/faas,alexellis/faas,openfaas/faas,alexellis/faas,rgee0/faas,alexellis/faas,alexellis/faas,rgee0/faas,rgee0/faas,openfaas/faas,rgee0/faas,rgee0/faas,rgee0/faas,alexellis/faas,rgee0/faas,alexellis/faas,rgee0/faas,alexellis/faas,alexellis/faas,alexellis/faas,rgee0/faas
|
---
+++
@@ -6,6 +6,6 @@
buf = buf + line
return buf
-if(__name__ == "__main__"):
+if __name__ == "__main__":
st = get_stdin()
print(st)
|
acd92d6a9e8c710657a4bcf1c46076f9d06f3d46
|
test_results/plot_all.py
|
test_results/plot_all.py
|
import glob
import csv
import numpy as np
import matplotlib.pyplot as plt
for file in glob.glob("*.csv"):
data = np.genfromtxt(file, delimiter = ',', names = True)
plt.figure(figsize=(10,20))
plt.suptitle(file)
num_plots = len(data.dtype.names)
count = 1
for col_name in data.dtype.names:
plt.subplot(num_plots, 1, count)
plt.plot(data[col_name], label=col_name)
plt.legend()
count += 1
ymin, ymax = plt.ylim()
if ymin < 0 < ymax:
plt.axhline(0, hold=True, color = 'grey') # plot line through zero
mng = plt.get_current_fig_manager()
if plt.get_backend() == 'TkAgg':
mng.window.state('zoomed')
elif plt.get_backend() == 'wxAgg':
mng.frame.Maximize(True)
elif plt.get_backend() == 'QT4Agg':
mng.window.showMaximized()
plt.savefig(file.rstrip('.csv') + '.pdf')
plt.show()
|
import glob
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.backends.backend_pdf
pdf = matplotlib.backends.backend_pdf.PdfPages("test-results-plots.pdf")
for file in glob.glob("*.csv"):
data = np.genfromtxt(file, delimiter = ',', names = True)
plt.figure(figsize=(10,20))
plt.suptitle(file)
num_plots = len(data.dtype.names)
count = 1
for col_name in data.dtype.names:
plt.subplot(num_plots, 1, count)
plt.plot(data[col_name], label=col_name)
plt.legend()
count += 1
ymin, ymax = plt.ylim()
if ymin < 0 < ymax:
plt.axhline(0, hold=True, color = 'grey') # plot line through zero
pdf.savefig()
mng = plt.get_current_fig_manager()
if plt.get_backend() == 'TkAgg':
mng.window.state('zoomed')
elif plt.get_backend() == 'wxAgg':
mng.frame.Maximize(True)
elif plt.get_backend() == 'QT4Agg':
mng.window.showMaximized()
plt.show()
plt.close()
pdf.close()
|
Save all simulation plots to one PDF instead of multiple
|
Save all simulation plots to one PDF instead of multiple
|
Python
|
agpl-3.0
|
BrewPi/firmware,glibersat/firmware,glibersat/firmware,BrewPi/firmware,glibersat/firmware,etk29321/firmware,etk29321/firmware,etk29321/firmware,etk29321/firmware,glibersat/firmware,glibersat/firmware,BrewPi/firmware,BrewPi/firmware,BrewPi/firmware,BrewPi/firmware,BrewPi/firmware,etk29321/firmware,BrewPi/firmware,etk29321/firmware,glibersat/firmware,glibersat/firmware
|
---
+++
@@ -1,7 +1,8 @@
import glob
-import csv
import numpy as np
import matplotlib.pyplot as plt
+import matplotlib.backends.backend_pdf
+pdf = matplotlib.backends.backend_pdf.PdfPages("test-results-plots.pdf")
for file in glob.glob("*.csv"):
data = np.genfromtxt(file, delimiter = ',', names = True)
@@ -21,6 +22,8 @@
if ymin < 0 < ymax:
plt.axhline(0, hold=True, color = 'grey') # plot line through zero
+ pdf.savefig()
+
mng = plt.get_current_fig_manager()
if plt.get_backend() == 'TkAgg':
mng.window.state('zoomed')
@@ -29,8 +32,12 @@
elif plt.get_backend() == 'QT4Agg':
mng.window.showMaximized()
- plt.savefig(file.rstrip('.csv') + '.pdf')
+
plt.show()
+ plt.close()
+
+pdf.close()
+
|
cc8f5b35d4c227f82b2872d5bfad24bef37209e5
|
overtime_calculator/__main__.py
|
overtime_calculator/__main__.py
|
import hug
from overtime_calculator import auth, api
@hug.get("/", output=hug.output_format.html)
def base():
return "<h1>Hello, world</h1>"
@hug.extend_api()
def with_other_apis():
return [
auth,
api
]
|
import sys
import pathlib
import hug
from overtime_calculator import auth, api
@hug.get("/", output=hug.output_format.html)
def base():
return "<h1>Hello, world</h1>"
@hug.extend_api()
def with_other_apis():
return [
auth,
api
]
if __name__ == '__main__':
_file = pathlib.Path(sys.argv[0])
module = _file.parent.name
print(
f"Start {module} with Hug, like so: hug --file {_file}",
file=sys.stderr,
)
sys.exit(1)
|
Improve UX for those who use CLI
|
Enhancement: Improve UX for those who use CLI
|
Python
|
mit
|
x10an14/overtime-calculator
|
---
+++
@@ -1,3 +1,6 @@
+import sys
+import pathlib
+
import hug
from overtime_calculator import auth, api
@@ -14,3 +17,12 @@
auth,
api
]
+
+if __name__ == '__main__':
+ _file = pathlib.Path(sys.argv[0])
+ module = _file.parent.name
+ print(
+ f"Start {module} with Hug, like so: hug --file {_file}",
+ file=sys.stderr,
+ )
+ sys.exit(1)
|
f3c99d8a8a9d485ebc9a18419a142f03d4730fba
|
examples/guv_simple_http_response.py
|
examples/guv_simple_http_response.py
|
# FIXME: pyuv_cffi needs to build the library BEFORE the standard library is patched
import pyuv_cffi
print('pyuv_cffi imported', pyuv_cffi)
import guv
guv.monkey_patch()
import guv.server
import logging
import time
from util import create_example
import logger
if not hasattr(time, 'perf_counter'):
time.perf_counter = time.clock
logger.configure()
log = logging.getLogger()
response_times = []
def get_avg_time():
global response_times
times = response_times[-1000:]
avg = sum(times) / len(times)
if len(response_times) > 5000:
response_times = times
return avg
def handle(sock, addr):
# client connected
start_time = time.perf_counter()
sock.sendall(create_example())
sock.close()
total_time = time.perf_counter() - start_time
response_times.append(total_time)
if __name__ == '__main__':
pool = guv.GreenPool()
try:
log.debug('Start')
server_sock = guv.listen(('0.0.0.0', 8001))
server = guv.server.Server(server_sock, handle, pool, 'spawn_n')
server.start()
except (SystemExit, KeyboardInterrupt):
log.debug('average response time: {}'.format(get_avg_time()))
log.debug('Bye!')
|
import guv
guv.monkey_patch()
import guv.server
import logging
import time
from util import create_example
import logger
from pympler import tracker
tr = tracker.SummaryTracker()
if not hasattr(time, 'perf_counter'):
time.perf_counter = time.clock
logger.configure()
log = logging.getLogger()
response_times = []
def get_avg_time():
global response_times
times = response_times[-1000:]
avg = sum(times) / len(times)
if len(response_times) > 5000:
response_times = times
return avg
def handle(sock, addr):
# client connected
start_time = time.perf_counter()
sock.sendall(create_example())
sock.close()
total_time = time.perf_counter() - start_time
response_times.append(total_time)
if __name__ == '__main__':
pool = guv.GreenPool()
try:
log.debug('Start')
server_sock = guv.listen(('0.0.0.0', 8001))
server = guv.server.Server(server_sock, handle, pool, 'spawn_n')
server.start()
except (SystemExit, KeyboardInterrupt):
tr.print_diff()
log.debug('Bye!')
|
Use pympler to look for memory leaks
|
Use pympler to look for memory leaks
|
Python
|
mit
|
veegee/guv,veegee/guv
|
---
+++
@@ -1,7 +1,3 @@
-# FIXME: pyuv_cffi needs to build the library BEFORE the standard library is patched
-import pyuv_cffi
-
-print('pyuv_cffi imported', pyuv_cffi)
import guv
guv.monkey_patch()
@@ -11,6 +7,9 @@
from util import create_example
import logger
+from pympler import tracker
+
+tr = tracker.SummaryTracker()
if not hasattr(time, 'perf_counter'):
time.perf_counter = time.clock
@@ -51,5 +50,5 @@
server = guv.server.Server(server_sock, handle, pool, 'spawn_n')
server.start()
except (SystemExit, KeyboardInterrupt):
- log.debug('average response time: {}'.format(get_avg_time()))
+ tr.print_diff()
log.debug('Bye!')
|
e68d0b269b5c632bb96cdc04f37b622b15a0382e
|
dataproc/dataproc_e2e_test.py
|
dataproc/dataproc_e2e_test.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Integration tests for Dataproc samples.
Creates a Dataproc cluster, uploads a pyspark file to Google Cloud Storage,
submits a job to Dataproc that runs the pyspark file, then downloads
the output logs from Cloud Storage and verifies the expected output."""
import create_cluster_and_submit_job
from gcp.testing.flaky import flaky
CLUSTER_NAME = 'testcluster2'
ZONE = 'us-central1-b'
@flaky
def test_e2e(cloud_config):
output = create_cluster_and_submit_job.main(
cloud_config.project, ZONE, CLUSTER_NAME, cloud_config.storage_bucket)
assert "['Hello,', 'dog', 'elephant', 'panther', 'world!']" in output
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Integration tests for Dataproc samples.
Creates a Dataproc cluster, uploads a pyspark file to Google Cloud Storage,
submits a job to Dataproc that runs the pyspark file, then downloads
the output logs from Cloud Storage and verifies the expected output."""
import create_cluster_and_submit_job
from gcp.testing.flaky import flaky
CLUSTER_NAME = 'testcluster2'
ZONE = 'us-central1-b'
@flaky
def test_e2e(cloud_config):
output = create_cluster_and_submit_job.main(
cloud_config.project, ZONE, CLUSTER_NAME, cloud_config.storage_bucket)
assert b"['Hello,', 'dog', 'elephant', 'panther', 'world!']" in output
|
Fix Dataproc e2e for Python 3
|
Fix Dataproc e2e for Python 3
|
Python
|
apache-2.0
|
JavaRabbit/CS496_capstone,sharbison3/python-docs-samples,hashems/Mobile-Cloud-Development-Projects,sharbison3/python-docs-samples,GoogleCloudPlatform/python-docs-samples,BrandonY/python-docs-samples,BrandonY/python-docs-samples,BrandonY/python-docs-samples,sharbison3/python-docs-samples,sharbison3/python-docs-samples,hashems/Mobile-Cloud-Development-Projects,JavaRabbit/CS496_capstone,JavaRabbit/CS496_capstone,GoogleCloudPlatform/python-docs-samples,JavaRabbit/CS496_capstone,canglade/NLP,canglade/NLP,GoogleCloudPlatform/python-docs-samples,BrandonY/python-docs-samples,hashems/Mobile-Cloud-Development-Projects,hashems/Mobile-Cloud-Development-Projects,canglade/NLP,canglade/NLP,GoogleCloudPlatform/python-docs-samples
|
---
+++
@@ -27,4 +27,4 @@
def test_e2e(cloud_config):
output = create_cluster_and_submit_job.main(
cloud_config.project, ZONE, CLUSTER_NAME, cloud_config.storage_bucket)
- assert "['Hello,', 'dog', 'elephant', 'panther', 'world!']" in output
+ assert b"['Hello,', 'dog', 'elephant', 'panther', 'world!']" in output
|
b0b232297f55cd38db85bb2ec5b30a6022a3f4d1
|
tweepy/asynchronous/__init__.py
|
tweepy/asynchronous/__init__.py
|
# Tweepy
# Copyright 2009-2021 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException("tweepy.asynchronous requires aiohttp to be installed")
from tweepy.asynchronous.streaming import AsyncStream
|
# Tweepy
# Copyright 2009-2021 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy.asynchronoous
Asynchronous interfaces with the Twitter API
"""
try:
import aiohttp
import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
raise TweepyException(
"tweepy.asynchronous requires aiohttp and oauthlib to be installed"
)
from tweepy.asynchronous.streaming import AsyncStream
|
Check oauthlib installation when importing asynchronous subpackage
|
Check oauthlib installation when importing asynchronous subpackage
|
Python
|
mit
|
svven/tweepy,tweepy/tweepy
|
---
+++
@@ -10,8 +10,11 @@
try:
import aiohttp
+ import oauthlib
except ModuleNotFoundError:
from tweepy.errors import TweepyException
- raise TweepyException("tweepy.asynchronous requires aiohttp to be installed")
+ raise TweepyException(
+ "tweepy.asynchronous requires aiohttp and oauthlib to be installed"
+ )
from tweepy.asynchronous.streaming import AsyncStream
|
f5f850e53a889a5afe483ae2ca07e147d4a94c08
|
tests.py
|
tests.py
|
#!/usr/bin/env python
# encoding: utf-8
import datetime
import unittest
import mock
from nose.tools import assert_equal, assert_is_instance
from pandas_finance import get_stock
class PandasFinanceTestCase(unittest.TestCase):
@mock.patch('pandas_finance.web.DataReader')
def test_get_stock_called_correctly(self, mock_datareader):
mock_datareader()
start = datetime.datetime(1999, 4, 3, 0, 0)
end = datetime.datetime(2005, 2, 5, 0, 0)
get_stock('AAPL', start, end)
mock_datareader.assert_called_with('AAPL', 'yahoo', start, end)
|
#!/usr/bin/env python
# encoding: utf-8
import datetime
import unittest
import mock
from nose.tools import assert_equal, assert_is_instance
from pandas_finance import get_stock, get_required_tickers
class PandasFinanceTestCase(unittest.TestCase):
@mock.patch('pandas_finance.web.DataReader')
def test_get_stock_called_correctly(self, mock_datareader):
mock_datareader()
start = datetime.datetime(1999, 4, 3, 0, 0)
end = datetime.datetime(2005, 2, 5, 0, 0)
get_stock('AAPL', start, end)
mock_datareader.assert_called_with('AAPL', 'yahoo', start, end)
def test_get_required_tickers_parses_tickers_with_newline(self):
m = mock.mock_open(read_data='TWTR,FB,AAPL,MSFT\n')
textfile = None # only used to provide valid argument
with mock.patch('pandas_finance.open', m, create=True):
result = get_required_tickers(textfile)
assert_equal('TWTR,FB,AAPL,MSFT', result)
|
Add test for parsing tickers.
|
Add test for parsing tickers.
|
Python
|
agpl-3.0
|
scraperwiki/stock-tool,scraperwiki/stock-tool
|
---
+++
@@ -6,8 +6,7 @@
import mock
from nose.tools import assert_equal, assert_is_instance
-
-from pandas_finance import get_stock
+from pandas_finance import get_stock, get_required_tickers
class PandasFinanceTestCase(unittest.TestCase):
@mock.patch('pandas_finance.web.DataReader')
@@ -17,3 +16,10 @@
end = datetime.datetime(2005, 2, 5, 0, 0)
get_stock('AAPL', start, end)
mock_datareader.assert_called_with('AAPL', 'yahoo', start, end)
+
+ def test_get_required_tickers_parses_tickers_with_newline(self):
+ m = mock.mock_open(read_data='TWTR,FB,AAPL,MSFT\n')
+ textfile = None # only used to provide valid argument
+ with mock.patch('pandas_finance.open', m, create=True):
+ result = get_required_tickers(textfile)
+ assert_equal('TWTR,FB,AAPL,MSFT', result)
|
019f259ae42a95802dce644511399332506ad1cc
|
tracing/tracing/metrics/metric_runner.py
|
tracing/tracing/metrics/metric_runner.py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from perf_insights import map_single_trace
from perf_insights import function_handle
from perf_insights.mre import file_handle
from perf_insights.mre import job as job_module
_METRIC_MAP_FUNCTION_FILENAME = 'metric_map_function.html'
_METRIC_MAP_FUNCTION_NAME = 'metricMapFunction'
def _GetMetricsDir():
return os.path.dirname(os.path.abspath(__file__))
def _GetMetricRunnerHandle(metric):
assert isinstance(metric, basestring)
metrics_dir = _GetMetricsDir()
metric_mapper_path = os.path.join(metrics_dir, _METRIC_MAP_FUNCTION_FILENAME)
modules_to_load = [function_handle.ModuleToLoad(filename=metric_mapper_path)]
map_function_handle = function_handle.FunctionHandle(
modules_to_load, _METRIC_MAP_FUNCTION_NAME, {'metric': metric})
return job_module.Job(map_function_handle, None)
def RunMetric(filename, metric, extra_import_options=None):
th = file_handle.URLFileHandle(filename, 'file://' + filename)
result = map_single_trace.MapSingleTrace(
th, _GetMetricRunnerHandle(metric), extra_import_options)
return result
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from perf_insights import map_single_trace
from perf_insights import function_handle
from perf_insights.mre import file_handle
from perf_insights.mre import job as job_module
_METRIC_MAP_FUNCTION_FILENAME = 'metric_map_function.html'
_METRIC_MAP_FUNCTION_NAME = 'metricMapFunction'
def _GetMetricsDir():
return os.path.dirname(os.path.abspath(__file__))
def _GetMetricRunnerHandle(metric):
assert isinstance(metric, basestring)
metrics_dir = _GetMetricsDir()
metric_mapper_path = os.path.join(metrics_dir, _METRIC_MAP_FUNCTION_FILENAME)
modules_to_load = [function_handle.ModuleToLoad(filename=metric_mapper_path)]
map_function_handle = function_handle.FunctionHandle(
modules_to_load, _METRIC_MAP_FUNCTION_NAME, {'metric': metric})
return job_module.Job(map_function_handle, None)
def RunMetric(filename, metric, extra_import_options=None):
url = 'file://' + os.path.abspath(filename)
th = file_handle.URLFileHandle(filename, url)
result = map_single_trace.MapSingleTrace(
th, _GetMetricRunnerHandle(metric), extra_import_options)
return result
|
Support relative paths in bin/run_metric
|
Support relative paths in bin/run_metric
As a result of this patch, it will be possible to run:
bin/run_metric MemoryMetric test_data/memory_dumps.json
^^^^^^^^^^^^^^^^^^^^^^^^^^^
instead of:
bin/run_metric MemoryMetric $PWD/test_data/memory_dumps.json
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Review URL: https://codereview.chromium.org/1836283008
|
Python
|
bsd-3-clause
|
catapult-project/catapult,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult,benschmaus/catapult
|
---
+++
@@ -27,7 +27,8 @@
return job_module.Job(map_function_handle, None)
def RunMetric(filename, metric, extra_import_options=None):
- th = file_handle.URLFileHandle(filename, 'file://' + filename)
+ url = 'file://' + os.path.abspath(filename)
+ th = file_handle.URLFileHandle(filename, url)
result = map_single_trace.MapSingleTrace(
th, _GetMetricRunnerHandle(metric), extra_import_options)
|
611aee2e704ffbad8579e5005ca36232097f96c5
|
bot/utils.py
|
bot/utils.py
|
from enum import IntEnum
from discord import Embed
class OpStatus(IntEnum):
SUCCESS = 0x2ECC71,
FAILURE = 0xc0392B,
WARNING = 0xf39C12,
NONE = None
def build_embed(ctx, desc: str, title: str = '', status: OpStatus = OpStatus.SUCCESS) -> Embed:
name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name
embed = Embed(
title=title,
description=desc,
color=status.value if status is not None else OpStatus.WARNING
)
embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url)
return embed
|
from enum import IntEnum
from discord import Embed
class OpStatus(IntEnum):
SUCCESS = 0x2ECC71,
FAILURE = 0xc0392B,
WARNING = 0xf39C12,
NONE = -1
def build_embed(ctx, desc: str, title: str = '', status: OpStatus = OpStatus.SUCCESS) -> Embed:
name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name
embed = Embed(
title=title,
description=desc,
color=status.value if status is not None and status is not -1 else None if status is -1 else OpStatus.WARNING
)
embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url)
return embed
|
Fix issue with enum not accepting null
|
Fix issue with enum not accepting null
|
Python
|
apache-2.0
|
HellPie/discord-reply-bot
|
---
+++
@@ -6,7 +6,7 @@
SUCCESS = 0x2ECC71,
FAILURE = 0xc0392B,
WARNING = 0xf39C12,
- NONE = None
+ NONE = -1
def build_embed(ctx, desc: str, title: str = '', status: OpStatus = OpStatus.SUCCESS) -> Embed:
@@ -14,7 +14,7 @@
embed = Embed(
title=title,
description=desc,
- color=status.value if status is not None else OpStatus.WARNING
+ color=status.value if status is not None and status is not -1 else None if status is -1 else OpStatus.WARNING
)
embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url)
return embed
|
b3466fc14e9616c620258eea382b644ac2585845
|
rest/urls.py
|
rest/urls.py
|
# Author: Braedy Kuzma
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends')
]
|
# Author: Braedy Kuzma
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends')
]
|
Add extra endpoint for posts?
|
Add extra endpoint for posts?
|
Python
|
apache-2.0
|
CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project
|
---
+++
@@ -8,6 +8,7 @@
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
+ url(r'^author/posts/$', views.PostView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
|
ab7ce76c47ea99080c105feb1a4f8aed39554597
|
doc/examples/example_world.py
|
doc/examples/example_world.py
|
from __future__ import unicode_literals
from imaginary.world import ImaginaryWorld
from imaginary.objects import Thing, Container, Exit
from imaginary.garments import createShirt, createPants
from imaginary.iimaginary import IClothing, IClothingWearer
from examplegame.squeaky import Squeaker
def world(store):
def room(name):
it = Thing(store=store, name=name)
Container.createFor(it, capacity=1000)
return it
world = ImaginaryWorld(store=store,
origin=room("The Beginning"))
protagonist = world.create("An Example Player")
shirt = createShirt(store=store, name="shirt", location=world.origin)
pants = createPants(store=store, name="pants", location=world.origin)
middle = room("The Middle")
wearer = IClothingWearer(protagonist)
wearer.putOn(IClothing(shirt))
wearer.putOn(IClothing(pants))
Exit.link(world.origin, middle, "north")
squeakerThing = Thing(name="squeaker", location=middle, store=store)
Squeaker.createFor(squeakerThing)
return world
|
from __future__ import unicode_literals
from imaginary.world import ImaginaryWorld
from imaginary.objects import Thing, Container, Exit
from imaginary.garments import createShirt, createPants
from imaginary.iimaginary import IClothing, IClothingWearer
from examplegame.squeaky import Squeaker
def world(store):
def room(name):
it = Thing(
store=store,
name=name,
proper=True,
)
Container.createFor(it, capacity=1000)
return it
world = ImaginaryWorld(store=store,
origin=room("The Beginning"))
protagonist = world.create("An Example Player")
shirt = createShirt(store=store, name="shirt", location=world.origin)
pants = createPants(store=store, name="pants", location=world.origin)
middle = room("The Middle")
wearer = IClothingWearer(protagonist)
wearer.putOn(IClothing(shirt))
wearer.putOn(IClothing(pants))
Exit.link(world.origin, middle, "north")
squeakerThing = Thing(name="squeaker", location=middle, store=store)
Squeaker.createFor(squeakerThing)
return world
|
Make rooms in the example game (grammatically) "proper" (nouns)
|
Make rooms in the example game (grammatically) "proper" (nouns)
|
Python
|
mit
|
twisted/imaginary
|
---
+++
@@ -11,7 +11,11 @@
def world(store):
def room(name):
- it = Thing(store=store, name=name)
+ it = Thing(
+ store=store,
+ name=name,
+ proper=True,
+ )
Container.createFor(it, capacity=1000)
return it
world = ImaginaryWorld(store=store,
|
94e8b7bf8b24dfa36f240e601cb0894b10cab21a
|
tools/examples/geturl.py
|
tools/examples/geturl.py
|
#!/usr/bin/env python2
#
# USAGE: geturl.py FILE_OR_DIR1 FILE_OR_DIR2 ...
#
# prints out the URL associated with each item
#
import sys
import svn._wc
import svn.util
def main(pool, files):
for f in files:
entry = svn._wc.svn_wc_entry(f, 0, pool)
print svn._wc.svn_wc_entry_t_url_get(entry)
if __name__ == '__main__':
svn.util.run_app(main, sys.argv[1:])
|
#!/usr/bin/env python2
#
# USAGE: geturl.py FILE_OR_DIR1 FILE_OR_DIR2 ...
#
# prints out the URL associated with each item
#
import os
import sys
import svn.wc
import svn.util
def main(pool, files):
for f in files:
dirpath = fullpath = os.path.abspath(f)
if not os.path.isdir(dirpath):
dirpath = os.path.dirname(dirpath)
adm_baton = svn.wc.svn_wc_adm_open(None, dirpath, 1, 1, pool)
try:
entry = svn.wc.svn_wc_entry(fullpath, adm_baton, 0, pool)
print svn.wc.svn_wc_entry_t_url_get(entry)
except:
svn.wc.svn_wc_adm_close(adm_baton)
if __name__ == '__main__':
svn.util.run_app(main, sys.argv[1:])
|
Update the example to use the new access baton stuff.
|
Update the example to use the new access baton stuff.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@844036 13f79535-47bb-0310-9956-ffa450edef68
|
Python
|
apache-2.0
|
YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion
|
---
+++
@@ -5,14 +5,23 @@
# prints out the URL associated with each item
#
+import os
import sys
-import svn._wc
+
+import svn.wc
import svn.util
def main(pool, files):
for f in files:
- entry = svn._wc.svn_wc_entry(f, 0, pool)
- print svn._wc.svn_wc_entry_t_url_get(entry)
+ dirpath = fullpath = os.path.abspath(f)
+ if not os.path.isdir(dirpath):
+ dirpath = os.path.dirname(dirpath)
+ adm_baton = svn.wc.svn_wc_adm_open(None, dirpath, 1, 1, pool)
+ try:
+ entry = svn.wc.svn_wc_entry(fullpath, adm_baton, 0, pool)
+ print svn.wc.svn_wc_entry_t_url_get(entry)
+ except:
+ svn.wc.svn_wc_adm_close(adm_baton)
if __name__ == '__main__':
svn.util.run_app(main, sys.argv[1:])
|
d76cb1aa296bc800cb24427110910a038a62a311
|
vctk/__init__.py
|
vctk/__init__.py
|
# coding: utf-8
from interface import *
class SpeechParameters(object):
"""
Speech parameters
"""
def __init__(self, f0, spectrum_envelope, aperiodicity):
self.f0 = f0
self.spectrum_envelope = spectrum_envelope
self.aperiodicity = aperiodicity
class VoiceConverter(object):
"""
Voice conversion
This class assumes:
- *_parameterizer implements `Parameterizer`
- *_converter implements `Converter`
- analyzer implements `Analyzer`
- synthesizer implments `Synthesizer`
analyzer and synthesizer must be specified explicitly.
*_parameterizer and *_converter can be None.
TODO:
parameterizerは、デフォでTrasparentParameterizer
(つまり特徴量をそのままパスするだけのparamterizer)にする?
"""
def __init__(self,
f0_parameterizer=None,
f0_converter=None,
spectrum_envelope_parameterizer=None,
spectrum_envelope_converter=None,
aperiodicity_parameterizer=None,
aperiodicity_converter=None,
analyzer=None,
synthesizer=None
):
self.f0_converter = f0_converter
self.f0_parameterizer = f0_parameterizer
self.spectrum_envelope_converter = spectrum_envelope_converter
self.spectrum_envelope_parameterizer = spectrum_envelope_parameterizer
self.aperiodicity_converter = aperiodicity_converter
self.aperiodicity_parameterizer = aperiodicity_parameterizer
if analyzer == None or synthesizer == None:
raise "backend must be specified explicitly!"
self.analyzer = analyzer
self.synthesizer = synthesizer
# speech paramters will be stored.
self.params = None
def analyze(self, x):
"""
Decompose speech into parametric representation
"""
self.params = self.analyzer.analyze(x)
def convert(self):
"""
Perform speech parameter conversion
"""
if self.params == None:
raise "`analyze` must be called before `convert`"
if self.f0_converter != None:
self.params.f0 = self.f0_parameterizer.backward(
self.f0_converter.convert(
self.f0_parameterizer.forward(self.params.f0)
)
)
if self.spectrum_envelope_converter != None:
self.params.spectrum_envelop = \
self.spectrum_envelope_parameterizer.backward(
self.spectrum_envelope_converter.convert(
self.spectrum_envelope_parameterizer.forward(
self.params.spectrum_envelope
)
)
)
if self.aperiodicity_converter != None:
self.params.aperiodicity = self.aperiodicity_parameterizer.backward(
self.aperiodicity_converter.convert(
self.aperiodicity_parameterizer.forward(
self.params.aperiodicity)
)
)
def synthesis(self):
"""
Synthesize speech waveform
"""
if self.params == None:
raise "`analyze` must be called before `synthesis`"
return self.synthesizer.synthesis(self.params)
|
Add class `VoiceConverter` that peforms all required processes in statistical voice conversion: speech analysis, feature parameterization, feature conversion and waveform syntheis
|
Add class `VoiceConverter` that peforms all required processes in statistical voice conversion: speech analysis, feature parameterization, feature conversion and waveform syntheis
|
Python
|
mit
|
k2kobayashi/sprocket
|
---
+++
@@ -0,0 +1,109 @@
+# coding: utf-8
+
+from interface import *
+
+
+class SpeechParameters(object):
+
+ """
+ Speech parameters
+ """
+
+ def __init__(self, f0, spectrum_envelope, aperiodicity):
+ self.f0 = f0
+ self.spectrum_envelope = spectrum_envelope
+ self.aperiodicity = aperiodicity
+
+
+class VoiceConverter(object):
+
+ """
+ Voice conversion
+
+ This class assumes:
+ - *_parameterizer implements `Parameterizer`
+ - *_converter implements `Converter`
+ - analyzer implements `Analyzer`
+ - synthesizer implments `Synthesizer`
+
+ analyzer and synthesizer must be specified explicitly.
+
+ *_parameterizer and *_converter can be None.
+
+ TODO:
+ parameterizerは、デフォでTrasparentParameterizer
+ (つまり特徴量をそのままパスするだけのparamterizer)にする?
+ """
+
+ def __init__(self,
+ f0_parameterizer=None,
+ f0_converter=None,
+ spectrum_envelope_parameterizer=None,
+ spectrum_envelope_converter=None,
+ aperiodicity_parameterizer=None,
+ aperiodicity_converter=None,
+ analyzer=None,
+ synthesizer=None
+ ):
+ self.f0_converter = f0_converter
+ self.f0_parameterizer = f0_parameterizer
+ self.spectrum_envelope_converter = spectrum_envelope_converter
+ self.spectrum_envelope_parameterizer = spectrum_envelope_parameterizer
+ self.aperiodicity_converter = aperiodicity_converter
+ self.aperiodicity_parameterizer = aperiodicity_parameterizer
+
+ if analyzer == None or synthesizer == None:
+ raise "backend must be specified explicitly!"
+
+ self.analyzer = analyzer
+ self.synthesizer = synthesizer
+
+ # speech paramters will be stored.
+ self.params = None
+
+ def analyze(self, x):
+ """
+ Decompose speech into parametric representation
+ """
+ self.params = self.analyzer.analyze(x)
+
+ def convert(self):
+ """
+ Perform speech parameter conversion
+ """
+ if self.params == None:
+ raise "`analyze` must be called before `convert`"
+
+ if self.f0_converter != None:
+ self.params.f0 = self.f0_parameterizer.backward(
+ self.f0_converter.convert(
+ self.f0_parameterizer.forward(self.params.f0)
+ )
+ )
+
+ if self.spectrum_envelope_converter != None:
+ self.params.spectrum_envelop = \
+ self.spectrum_envelope_parameterizer.backward(
+ self.spectrum_envelope_converter.convert(
+ self.spectrum_envelope_parameterizer.forward(
+ self.params.spectrum_envelope
+ )
+ )
+ )
+
+ if self.aperiodicity_converter != None:
+ self.params.aperiodicity = self.aperiodicity_parameterizer.backward(
+ self.aperiodicity_converter.convert(
+ self.aperiodicity_parameterizer.forward(
+ self.params.aperiodicity)
+ )
+ )
+
+ def synthesis(self):
+ """
+ Synthesize speech waveform
+ """
+ if self.params == None:
+ raise "`analyze` must be called before `synthesis`"
+
+ return self.synthesizer.synthesis(self.params)
|
|
15652b6817ad4548881883ee89981aff49c52c56
|
muranoagent/version.py
|
muranoagent/version.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('muranoagent')
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('murano-agent')
|
Fix murano agent component name
|
Fix murano agent component name
Change-Id: I8f76630d3f007f89585b3418ff661c48004a2b5f
Closes-bug: #1373347
|
Python
|
apache-2.0
|
Bloomie/murano-agent,Bloomie/murano-agent,telefonicaid/murano-agent,openstack/murano-agent,telefonicaid/murano-agent,Bloomie/murano-agent,openstack/murano-agent,telefonicaid/murano-agent,openstack/murano-agent,openstack/murano-agent,Bloomie/murano-agent
|
---
+++
@@ -13,4 +13,4 @@
import pbr.version
-version_info = pbr.version.VersionInfo('muranoagent')
+version_info = pbr.version.VersionInfo('murano-agent')
|
d23b82e89c756e8cd51aa2abfacd6cc7b2907fae
|
pygame/__init__.py
|
pygame/__init__.py
|
""" XXX: fish """
__all__ = ['display', 'color', 'surface', 'Color', 'time', 'event',
'constants', 'sprite', 'Rect', 'Surface', 'QUIT', 'init',
'mouse', 'locals', 'image']
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import QUIT
from pygame import display, color, surface, time, event, constants, sprite,\
mouse, locals, images
# XXX
from pygame.display import init
|
""" XXX: fish """
__all__ = ['display', 'color', 'surface', 'Color', 'time', 'event',
'constants', 'sprite', 'Rect', 'Surface', 'QUIT', 'init',
'mouse', 'locals', 'image']
from pygame.color import Color
from pygame.rect import Rect
from pygame.surface import Surface
from pygame.constants import QUIT
from pygame import display, color, surface, time, event, constants, sprite,\
mouse, locals, image
# XXX
from pygame.display import init
|
Fix error introduced in rebase
|
Fix error introduced in rebase
|
Python
|
lgpl-2.1
|
GertBurger/pygame_cffi,caseyc37/pygame_cffi,GertBurger/pygame_cffi,CTPUG/pygame_cffi,caseyc37/pygame_cffi,caseyc37/pygame_cffi,GertBurger/pygame_cffi,GertBurger/pygame_cffi,CTPUG/pygame_cffi,CTPUG/pygame_cffi
|
---
+++
@@ -9,6 +9,6 @@
from pygame.surface import Surface
from pygame.constants import QUIT
from pygame import display, color, surface, time, event, constants, sprite,\
- mouse, locals, images
+ mouse, locals, image
# XXX
from pygame.display import init
|
dc10cbafe045d55906d627816a88323fb4a8c948
|
exec_proc.py
|
exec_proc.py
|
#!/usr/bin/env python
# Copyright 2014 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen,PIPE
import shlex
import logging
class ExecProc:
def __init__(self):
self.command = None
self.debug = False
def setDebug(self,debug):
self.debug = debug
def setCommand(self,command):
if type(command) != str:
raise ValueError
self.command = command
def execute(self):
if self.command == None:
raise ValueError
args = shlex.split(self.command)
if self.debug == True:
logging.info("command=\"%s\"",self.command)
p = Popen(args,stdout=PIPE)
o,e = p.communicate()
if self.debug == True:
logging.info("output=\"%s\"",o)
return o
|
#!/usr/bin/env python
# Copyright 2014 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen,PIPE
import shlex
import logging
class ExecProc:
def __init__(self):
self.command = None
self.debug = False
def setDebug(self,debug):
self.debug = debug
def setCommand(self,command):
if type(command) != str:
raise ValueError
self.command = command
def execute(self):
if self.command == None:
raise ValueError
# Remove Carriage Returns
command = self.command.strip('\r')
args = shlex.split(command)
if self.debug == True:
logging.info("command=\"%s\"",args)
p = Popen(args,stdout=PIPE)
o,e = p.communicate()
if self.debug == True:
logging.info("output=\"%s\"",o)
logging.info(':'.join(x.encode('hex') for x in o))
return o
|
Add output of command in hex
|
Add output of command in hex
|
Python
|
apache-2.0
|
boundary/boundary-plugin-shell,boundary/boundary-plugin-shell,jdgwartney/boundary-plugin-shell,jdgwartney/boundary-plugin-shell
|
---
+++
@@ -17,8 +17,6 @@
import shlex
import logging
-
-
class ExecProc:
def __init__(self):
@@ -36,11 +34,15 @@
def execute(self):
if self.command == None:
raise ValueError
- args = shlex.split(self.command)
+ # Remove Carriage Returns
+ command = self.command.strip('\r')
+ args = shlex.split(command)
if self.debug == True:
- logging.info("command=\"%s\"",self.command)
+ logging.info("command=\"%s\"",args)
p = Popen(args,stdout=PIPE)
o,e = p.communicate()
if self.debug == True:
logging.info("output=\"%s\"",o)
+ logging.info(':'.join(x.encode('hex') for x in o))
return o
+
|
68b1c3804504ecc14f7c23465ca11db31489e1cd
|
mozcal/events/views.py
|
mozcal/events/views.py
|
from django.shortcuts import render, get_object_or_404
from mozcal.events.models import Event, Space, FunctionalArea
def one(request, slug):
event = get_object_or_404(Event, slug=slug)
return render(request, 'event.html', { 'event': event })
def all(request):
events = Event.objects.all()
spaces = Space.objects.all()
areas = FunctionalArea.objects.all()
return render(request, 'events_all.html', {
'events': events,
'spaces': spaces,
'areas': areas
})
|
from django.shortcuts import render, get_object_or_404
from mozcal.events.models import Event, Space, FunctionalArea
def one(request, slug):
event = get_object_or_404(Event, slug=slug)
return render(request, 'event.html', { 'event': event })
def all(request):
search_string = request.GET.get('search', '')
events = Event.objects.filter(title__icontains=search_string)
spaces = Space.objects.all()
areas = FunctionalArea.objects.all()
return render(request, 'events_all.html', {
'events': events,
'spaces': spaces,
'areas': areas
})
|
Allow filtering of events by title
|
Allow filtering of events by title
|
Python
|
bsd-3-clause
|
ppapadeas/wprevents,yvan-sraka/wprevents,yvan-sraka/wprevents,ppapadeas/wprevents,yvan-sraka/wprevents,ppapadeas/wprevents,yvan-sraka/wprevents
|
---
+++
@@ -9,7 +9,9 @@
def all(request):
- events = Event.objects.all()
+ search_string = request.GET.get('search', '')
+
+ events = Event.objects.filter(title__icontains=search_string)
spaces = Space.objects.all()
areas = FunctionalArea.objects.all()
|
ce191a9ea7bad7493560a7bdd7f7de2e56f94612
|
fuse_util.py
|
fuse_util.py
|
import sublime
import os
def getSetting(key,default=None):
s = sublime.load_settings("Fuse.sublime-settings")
return s.get(key, default)
def getFusePathFromSettings():
path = getSetting("fuse_path_override")
if path == "" or path == None:
return "fuse"
else:
return path+"/fuse"
def setSetting(key,value):
s = sublime.load_settings("Fuse.sublime-settings")
s.set(key, value)
sublime.save_settings("Fuse.sublime-settings")
def isSupportedSyntax(syntaxName):
return syntaxName == "Uno" or syntaxName == "UX"
def getExtension(path):
base = os.path.basename(path)
return os.path.splitext(base)[0]
def getRowCol(view, pos):
rowcol = view.rowcol(pos)
rowcol = (rowcol[0] + 1, rowcol[1] + 1)
return {"Line": rowcol[0], "Character": rowcol[1]}
|
import sublime
import os
def getSetting(key,default=None):
s = sublime.load_settings("Fuse.sublime-settings")
return s.get(key, default)
def getFusePathFromSettings():
path = getSetting("fuse_path_override")
if path == "" or path == None:
if os.path.isfile("/usr/bin/fuse"):
return "/usr/bin/fuse"
else:
return "/usr/local/bin/fuse"
else:
return path+"/fuse"
def setSetting(key,value):
s = sublime.load_settings("Fuse.sublime-settings")
s.set(key, value)
sublime.save_settings("Fuse.sublime-settings")
def isSupportedSyntax(syntaxName):
return syntaxName == "Uno" or syntaxName == "UX"
def getExtension(path):
base = os.path.basename(path)
return os.path.splitext(base)[0]
def getRowCol(view, pos):
rowcol = view.rowcol(pos)
rowcol = (rowcol[0] + 1, rowcol[1] + 1)
return {"Line": rowcol[0], "Character": rowcol[1]}
|
Make Sublime plugin work with new Fuse install location
|
Make Sublime plugin work with new Fuse install location
We changed the location of fuse from /usr/bin to /usr/local/bin to be
compatible with El Capitan. The latter is not on path in Sublime, so use
absolute paths for fuse.
|
Python
|
mit
|
fusetools/Fuse.SublimePlugin,fusetools/Fuse.SublimePlugin
|
---
+++
@@ -8,7 +8,10 @@
def getFusePathFromSettings():
path = getSetting("fuse_path_override")
if path == "" or path == None:
- return "fuse"
+ if os.path.isfile("/usr/bin/fuse"):
+ return "/usr/bin/fuse"
+ else:
+ return "/usr/local/bin/fuse"
else:
return path+"/fuse"
|
02fc723b8d459de1bcf94dc02f7b8531dd9e7cfb
|
events/tests/test_user_get.py
|
events/tests/test_user_get.py
|
# -*- coding: utf-8 -*-
import pytest
from .utils import get, versioned_reverse as reverse, assert_fields_exist
# === util methods ===
def get_list(api_client, version='v1'):
list_url = reverse('user-list', version=version)
return get(api_client, list_url)
def assert_user_fields_exist(data, version='v1'):
# TODO: incorporate version parameter into version aware
# parts of test code
fields = (
'last_login',
'username',
'email',
'date_joined',
'first_name',
'last_name',
'uuid',
'department_name',
'organization',
'is_staff',
'display_name',
)
assert_fields_exist(data, fields)
# === tests ===
@pytest.mark.django_db
def test__get_user_list(api_client, user, organization):
organization.admin_users.add(user)
api_client.force_authenticate(user=user)
response = get_list(api_client)
print(response.data)
assert_user_fields_exist(response.data['data'][0])
|
# -*- coding: utf-8 -*-
import pytest
from .utils import get, versioned_reverse as reverse, assert_fields_exist
# === util methods ===
def get_list(api_client, version='v1'):
list_url = reverse('user-list', version=version)
return get(api_client, list_url)
def get_detail(api_client, detail_pk, version='v1'):
detail_url = reverse('user-detail', version=version, kwargs={'pk': detail_pk})
return get(api_client, detail_url)
def assert_user_fields_exist(data, version='v1'):
# TODO: incorporate version parameter into version aware
# parts of test code
fields = (
'last_login',
'username',
'email',
'date_joined',
'first_name',
'last_name',
'uuid',
'department_name',
'organization',
'is_staff',
'display_name',
)
assert_fields_exist(data, fields)
# === tests ===
@pytest.mark.django_db
def test__get_user_list(api_client, user, organization):
organization.admin_users.add(user)
api_client.force_authenticate(user=user)
response = get_detail(api_client, user.pk)
print(response.data)
assert_user_fields_exist(response.data)
|
Check user detail in test
|
Check user detail in test
|
Python
|
mit
|
aapris/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,aapris/linkedevents,City-of-Helsinki/linkedevents,aapris/linkedevents
|
---
+++
@@ -8,6 +8,10 @@
def get_list(api_client, version='v1'):
list_url = reverse('user-list', version=version)
return get(api_client, list_url)
+
+def get_detail(api_client, detail_pk, version='v1'):
+ detail_url = reverse('user-detail', version=version, kwargs={'pk': detail_pk})
+ return get(api_client, detail_url)
def assert_user_fields_exist(data, version='v1'):
# TODO: incorporate version parameter into version aware
@@ -33,6 +37,6 @@
def test__get_user_list(api_client, user, organization):
organization.admin_users.add(user)
api_client.force_authenticate(user=user)
- response = get_list(api_client)
+ response = get_detail(api_client, user.pk)
print(response.data)
- assert_user_fields_exist(response.data['data'][0])
+ assert_user_fields_exist(response.data)
|
d0908d1e4e5279579a93772210b001c19fae9b10
|
cogs/misc.py
|
cogs/misc.py
|
import discord
from discord.ext import commands
class Misc:
@commands.command()
async def highfive(self, ctx):
"""
Give Yutu a high-five
"""
await ctx.send('{0.mention} :pray: {1.mention}'.format(ctx.me, ctx.author))
@commands.command()
async def cute(self, ctx, user: discord.Member = None):
"""
Tell someone they are cute!
Tells a user that you think they are cute, if you don't give a user, then Yutu will let you know that you are cute.
"""
if user is None:
first = ctx.me
second = ctx.author
else:
first = ctx.author
second = user
post = discord.Embed(description='**{0.display_name}** thinks that **{1.display_name}** is cute!'.format(first,
second))
post.set_image(url="https://i.imgur.com/MuVAkV2.gif")
await ctx.send(embed=post)
|
import discord
from discord.ext import commands
class Misc:
@commands.command()
async def highfive(self, ctx: commands.Context):
"""
Give Yutu a high-five
"""
await ctx.send('{0.mention} :pray: {1.mention}'.format(ctx.me, ctx.author))
@commands.command()
async def cute(self, ctx: commands.Context, user: discord.Member = None):
"""
Tell someone they are cute!
Tells a user that you think they are cute, if you don't give a user, then Yutu will let you know that you are cute.
"""
if user is None:
first = ctx.me
second = ctx.author
else:
first = ctx.author
second = user
post = discord.Embed(description='**{0.display_name}** thinks that **{1.display_name}** is cute!'.format(first,
second))
post.set_image(url="https://i.imgur.com/MuVAkV2.gif")
await ctx.send(embed=post)
|
Add type markers for ctx objects
|
Add type markers for ctx objects
|
Python
|
mit
|
HarkonenBade/yutu
|
---
+++
@@ -3,14 +3,14 @@
class Misc:
@commands.command()
- async def highfive(self, ctx):
+ async def highfive(self, ctx: commands.Context):
"""
Give Yutu a high-five
"""
await ctx.send('{0.mention} :pray: {1.mention}'.format(ctx.me, ctx.author))
@commands.command()
- async def cute(self, ctx, user: discord.Member = None):
+ async def cute(self, ctx: commands.Context, user: discord.Member = None):
"""
Tell someone they are cute!
|
2263d180184c908b0e96d53f43f6c81aa23a3c92
|
push/urls.py
|
push/urls.py
|
from django.conf.urls import url
from push import views
urlpatterns = [
url(r'^$', views.index, name = 'index'),
url(r'^sender', views.sender, name = 'sender'),
url(r'^notification_list', views.notification_list, name = 'notification_list'),
url(r'^settings', views.settings, name = 'settings'),
url(r'^notification', views.notification, name = 'notification'),
url(r'^register', views.device_token_register, name = 'device_token_register'),
url(r'^delete/device_token/(?P<device_token_id>\d+)/$', views.delete_device_token, name = 'delete_device_token'),
]
|
from django.conf.urls import url
from push import views
urlpatterns = [
url(r'^$', views.index, name = 'index'),
url(r'^sender', views.sender, name = 'sender'),
url(r'^notification_list', views.notification_list, name = 'notification_list'),
url(r'^settings', views.settings, name = 'settings'),
url(r'^notification', views.notification, name = 'notification'),
url(r'^(?P<username>\w+)/register', views.device_token_register, name = 'device_token_register'),
url(r'^delete/device_token/(?P<device_token_id>\d+)/$', views.delete_device_token, name = 'delete_device_token'),
]
|
Modify device_token register URL dispatcher
|
Modify device_token register URL dispatcher
|
Python
|
apache-2.0
|
nnsnodnb/django-mbaas,nnsnodnb/django-mbaas,nnsnodnb/django-mbaas
|
---
+++
@@ -7,6 +7,6 @@
url(r'^notification_list', views.notification_list, name = 'notification_list'),
url(r'^settings', views.settings, name = 'settings'),
url(r'^notification', views.notification, name = 'notification'),
- url(r'^register', views.device_token_register, name = 'device_token_register'),
+ url(r'^(?P<username>\w+)/register', views.device_token_register, name = 'device_token_register'),
url(r'^delete/device_token/(?P<device_token_id>\d+)/$', views.delete_device_token, name = 'delete_device_token'),
]
|
d5f0b698831e4bfb35b74ef0d8c7af75c91e67d3
|
dadd/worker/handlers.py
|
dadd/worker/handlers.py
|
import os
import json
import socket
import requests
from flask import request, jsonify, Response, abort
from dadd.worker import app
from dadd.worker.proc import ChildProcess
@app.route('/run/', methods=['POST'])
def run_process():
proc = ChildProcess(request.json)
proc.run()
return jsonify(proc.info())
@app.route('/register/', methods=['POST'])
def register_with_master():
register(app)
return jsonify({'message': 'ok'})
def register(host, port):
sess = requests.Session()
if 'USERNAME' in app.config and 'PASSWORD' in app.config:
sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
try:
url = app.config['MASTER_URL'] + '/api/hosts/'
resp = sess.post(url, data=json.dumps({
'host': app.config.get('HOSTNAME', socket.getfqdn()),
'port': port
}))
if not resp.ok:
app.logger.warning('Error registering with master: %s' %
app.config['MASTER_URL'])
except Exception as e:
app.logger.warning('Connection Error: %s' % e)
@app.route('/logs/<path>', methods=['GET'])
def tail_log(path):
if os.path.exists(path) and path.startswith('/tmp/'):
return Response(open(path), content_type='text/plain')
abort(404)
|
import os
import json
import socket
import requests
from flask import request, jsonify, Response, abort
from dadd.worker import app
from dadd.worker.proc import ChildProcess
@app.route('/run/', methods=['POST'])
def run_process():
proc = ChildProcess(request.json)
proc.run()
return jsonify(proc.info())
@app.route('/register/', methods=['POST'])
def register_with_master():
register(app.config['HOST'], app.config['PORT'])
return jsonify({'message': 'ok'})
def register(host, port):
sess = requests.Session()
if 'USERNAME' in app.config and 'PASSWORD' in app.config:
sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
try:
url = app.config['MASTER_URL'] + '/api/hosts/'
resp = sess.post(url, data=json.dumps({
'host': app.config.get('HOSTNAME', socket.getfqdn()),
'port': port
}))
if not resp.ok:
app.logger.warning('Error registering with master: %s' %
app.config['MASTER_URL'])
except Exception as e:
app.logger.warning('Connection Error: %s' % e)
@app.route('/logs/<path:path>', methods=['GET'])
def tail_log(path):
path = '/' + path
if os.path.exists(path) and path.startswith('/tmp/'):
return Response(open(path), content_type='text/plain')
abort(404)
|
Fix up the manual register URL in the worker and fix the initial log tail.
|
Fix up the manual register URL in the worker and fix the initial log tail.
|
Python
|
bsd-3-clause
|
ionrock/dadd,ionrock/dadd,ionrock/dadd,ionrock/dadd
|
---
+++
@@ -19,7 +19,7 @@
@app.route('/register/', methods=['POST'])
def register_with_master():
- register(app)
+ register(app.config['HOST'], app.config['PORT'])
return jsonify({'message': 'ok'})
@@ -43,8 +43,9 @@
app.logger.warning('Connection Error: %s' % e)
-@app.route('/logs/<path>', methods=['GET'])
+@app.route('/logs/<path:path>', methods=['GET'])
def tail_log(path):
+ path = '/' + path
if os.path.exists(path) and path.startswith('/tmp/'):
return Response(open(path), content_type='text/plain')
abort(404)
|
3501f3404aebf6dc7ba349eafdc80602b98f72a9
|
snaek/ffi.py
|
snaek/ffi.py
|
import os
import re
import cffi
_directive_re = re.compile(r'^\s*#.*?$(?m)')
def make_ffi(module_path, crate_path, cached_header_filename=None):
"""Creates a FFI instance for the given configuration."""
if cached_header_filename is not None and \
os.path.isfile(cached_header_filename):
with open(cached_header_filename, 'rb') as f:
header = f.read()
else:
from .bindgen import generate_header
header = generate_header(crate_path)
header = _directive_re.sub('', header)
ffi = cffi.FFI()
ffi.cdef(header)
ffi.set_source(module_path, None)
return ffi
|
import os
import re
import sys
import cffi
_directive_re = re.compile(r'^\s*#.*?$(?m)')
def make_ffi(module_path, crate_path, cached_header_filename=None):
"""Creates a FFI instance for the given configuration."""
if cached_header_filename is not None and \
os.path.isfile(cached_header_filename):
with open(cached_header_filename, 'rb') as f:
header = f.read()
else:
from .bindgen import generate_header
header = generate_header(crate_path)
header = _directive_re.sub('', header)
if os.environ.get('SNAEK_DEBUG_HEADER') == '1':
sys.stderr.write('/* generated header for "%s" */\n' % module_path)
sys.stderr.write(header)
sys.stderr.write('\n')
sys.stderr.flush()
ffi = cffi.FFI()
ffi.cdef(header)
ffi.set_source(module_path, None)
return ffi
|
Add a way to dump the header during setup.py runs
|
Add a way to dump the header during setup.py runs
|
Python
|
apache-2.0
|
mitsuhiko/snaek,mitsuhiko/snaek,mitsuhiko/snaek
|
---
+++
@@ -1,5 +1,6 @@
import os
import re
+import sys
import cffi
@@ -16,6 +17,13 @@
from .bindgen import generate_header
header = generate_header(crate_path)
header = _directive_re.sub('', header)
+
+ if os.environ.get('SNAEK_DEBUG_HEADER') == '1':
+ sys.stderr.write('/* generated header for "%s" */\n' % module_path)
+ sys.stderr.write(header)
+ sys.stderr.write('\n')
+ sys.stderr.flush()
+
ffi = cffi.FFI()
ffi.cdef(header)
ffi.set_source(module_path, None)
|
fb1d39ed30e73bef49be7a71945d5dfd67af28e3
|
scripting.py
|
scripting.py
|
#!/usr/bin/env python2
import os, shutil
def print_warning(message, *args, **kwargs):
import colortext
if args or kwargs: message = message.format(*args, **kwargs)
colortext.write(message, color='red')
def print_error_and_die(message, *args, **kwargs):
print_warning(message + " Aborting...", *args, **kwargs)
raise SystemExit(1)
def clear_directory(directory):
if os.path.exists(directory): shutil.rmtree(directory)
os.makedirs(directory)
|
#!/usr/bin/env python2
import os, shutil
def print_warning(message, *args, **kwargs):
import colortext
if args or kwargs: message = message.format(*args, **kwargs)
colortext.write(message, color='red')
def print_error_and_die(message, *args, **kwargs):
print_warning(message + " Aborting...", *args, **kwargs)
raise SystemExit(1)
def clear_directory(directory):
if os.path.exists(directory): shutil.rmtree(directory)
os.makedirs(directory)
def mkdir(newdir):
if os.path.isdir(newdir):
pass
elif os.path.isfile(newdir):
raise OSError("a file with the same name as the desired " \
"dir, '%s', already exists." % newdir)
else:
os.makedirs(newdir)
|
Add a friendly mkdir() function.
|
Add a friendly mkdir() function.
|
Python
|
mit
|
Kortemme-Lab/klab,Kortemme-Lab/klab,Kortemme-Lab/klab,Kortemme-Lab/klab
|
---
+++
@@ -15,3 +15,11 @@
if os.path.exists(directory): shutil.rmtree(directory)
os.makedirs(directory)
+def mkdir(newdir):
+ if os.path.isdir(newdir):
+ pass
+ elif os.path.isfile(newdir):
+ raise OSError("a file with the same name as the desired " \
+ "dir, '%s', already exists." % newdir)
+ else:
+ os.makedirs(newdir)
|
0216bfd48fddb9bb7bda611ec5bdfe368bdee55f
|
layout/tests.py
|
layout/tests.py
|
from django.test import TestCase
# Create your tests here.
|
from django.core.urlresolvers import resolve
from django.test import TestCase
from layout.views import home
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page(self):
found = resolve('/')
self.assertEqual(found.func, home)
|
Add home page resolve test to layout
|
Add home page resolve test to layout
|
Python
|
mit
|
jvanbrug/scout,jvanbrug/scout
|
---
+++
@@ -1,3 +1,11 @@
+from django.core.urlresolvers import resolve
from django.test import TestCase
-# Create your tests here.
+from layout.views import home
+
+
+class HomePageTest(TestCase):
+
+ def test_root_url_resolves_to_home_page(self):
+ found = resolve('/')
+ self.assertEqual(found.func, home)
|
6556acc2d1be648fdb362f1f1e5000f443642416
|
examples/mnist-autoencoder.py
|
examples/mnist-autoencoder.py
|
#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Autoencoder
def get_datasets(self):
return [(x, ) for x, _ in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-autoencoder.pkl.gz')
Main().train().save(path)
print 'saved network to', path
|
#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Autoencoder
def get_datasets(self):
return [x for x, _ in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-autoencoder.pkl.gz')
Main().train().save(path)
print 'saved network to', path
|
Return unlabeled data for the autoencoder as a straight array.
|
Return unlabeled data for the autoencoder as a straight array.
|
Python
|
mit
|
lmjohns3/theanets,chrinide/theanets,devdoer/theanets
|
---
+++
@@ -28,7 +28,7 @@
return lmj.tnn.Autoencoder
def get_datasets(self):
- return [(x, ) for x, _ in cPickle.load(gzip.open(DATASET))]
+ return [x for x, _ in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-autoencoder.pkl.gz')
Main().train().save(path)
|
9debed5d1d83bdf2098a7a3841ae4ff272e7ea8e
|
lib/__init__.py
|
lib/__init__.py
|
from client import WebHDFSClient
__version__ = '1.0'
|
from errors import WebHDFSError
from client import WebHDFSClient
from attrib import WebHDFSObject
__version__ = '1.0'
|
Make other API classes available from base module.
|
Make other API classes available from base module.
|
Python
|
mit
|
mk23/webhdfs,mk23/webhdfs
|
---
+++
@@ -1,3 +1,5 @@
+from errors import WebHDFSError
from client import WebHDFSClient
+from attrib import WebHDFSObject
__version__ = '1.0'
|
2cb2779bfe1ddfcd6651665276ed0a1d513c57de
|
fireplace/cards/wog/shaman.py
|
fireplace/cards/wog/shaman.py
|
from ..utils import *
##
# Minions
class OG_023:
"Primal Fusion"
play = Buff(TARGET, "OG_023t") * Count(FRIENDLY_MINIONS + TOTEM)
OG_023t = buff(+1, +1)
class OG_209:
"Hallazeal the Ascended"
events = Damage(source=SPELL + FRIENDLY).on(Heal(FRIENDLY_HERO, Damage.AMOUNT))
|
from ..utils import *
##
# Minions
class OG_023:
"Primal Fusion"
play = Buff(TARGET, "OG_023t") * Count(FRIENDLY_MINIONS + TOTEM)
OG_023t = buff(+1, +1)
class OG_026:
"Eternal Sentinel"
play = UnlockOverload(CONTROLLER)
class OG_209:
"Hallazeal the Ascended"
events = Damage(source=SPELL + FRIENDLY).on(Heal(FRIENDLY_HERO, Damage.AMOUNT))
##
# Spells
class OG_206:
"Stormcrack"
play = Hit(TARGET, 4)
##
# Weapons
class OG_031:
"Hammer of Twilight"
deathrattle = Summon(CONTROLLER, "OG_031a")
|
Implement Eternal Sentinel, Stormcrack and Hammer of Twilight
|
Implement Eternal Sentinel, Stormcrack and Hammer of Twilight
|
Python
|
agpl-3.0
|
NightKev/fireplace,beheh/fireplace,jleclanche/fireplace
|
---
+++
@@ -11,6 +11,27 @@
OG_023t = buff(+1, +1)
+class OG_026:
+ "Eternal Sentinel"
+ play = UnlockOverload(CONTROLLER)
+
+
class OG_209:
"Hallazeal the Ascended"
events = Damage(source=SPELL + FRIENDLY).on(Heal(FRIENDLY_HERO, Damage.AMOUNT))
+
+
+##
+# Spells
+
+class OG_206:
+ "Stormcrack"
+ play = Hit(TARGET, 4)
+
+
+##
+# Weapons
+
+class OG_031:
+ "Hammer of Twilight"
+ deathrattle = Summon(CONTROLLER, "OG_031a")
|
562b56d67d7d292d7c63ec8c3f453bae92a3b073
|
tests/test_wysiwyg_editor.py
|
tests/test_wysiwyg_editor.py
|
from . import TheInternetTestCase
from helium.api import click, Text, press, CONTROL, COMMAND, write
from sys import platform
class WYSIWYGEditorTest(TheInternetTestCase):
def get_page(self):
return "http://the-internet.herokuapp.com/tinymce"
def test_use_wysiwyg_editor(self):
self.assertTrue(Text("Your content goes here.").exists())
click("Your content goes here.")
if platform == 'darwin':
press(COMMAND + 'a')
else:
press(CONTROL + 'a')
write("Hello Helium!")
self.assertTrue(Text("Hello Helium!").exists())
|
from . import TheInternetTestCase
from helium.api import click, Text, write
class WYSIWYGEditorTest(TheInternetTestCase):
def get_page(self):
return "http://the-internet.herokuapp.com/tinymce"
def test_use_wysiwyg_editor(self):
self.assertTrue(Text("Your content goes here.").exists())
click("File")
click("New document")
write("Hello Helium!")
self.assertTrue(Text("Hello Helium!").exists())
|
Simplify the WYSIWYG editor test case.
|
Simplify the WYSIWYG editor test case.
|
Python
|
mit
|
bugfree-software/the-internet-solution-python
|
---
+++
@@ -1,16 +1,12 @@
from . import TheInternetTestCase
-from helium.api import click, Text, press, CONTROL, COMMAND, write
-from sys import platform
+from helium.api import click, Text, write
class WYSIWYGEditorTest(TheInternetTestCase):
def get_page(self):
return "http://the-internet.herokuapp.com/tinymce"
def test_use_wysiwyg_editor(self):
self.assertTrue(Text("Your content goes here.").exists())
- click("Your content goes here.")
- if platform == 'darwin':
- press(COMMAND + 'a')
- else:
- press(CONTROL + 'a')
+ click("File")
+ click("New document")
write("Hello Helium!")
self.assertTrue(Text("Hello Helium!").exists())
|
47b3d205931d6ee7fa8062b3e2f01d1ea07df11a
|
pathvalidate/_error.py
|
pathvalidate/_error.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
class NullNameError(ValueError):
"""
Raised when a name is empty.
"""
class InvalidCharError(ValueError):
"""
Raised when includes invalid character(s) within a string.
"""
class InvalidCharWindowsError(InvalidCharError):
"""
Raised when includes Windows specific invalid character(s) within a string.
"""
class InvalidLengthError(ValueError):
"""
Raised when a string too long/short.
"""
class ReservedNameError(ValueError):
"""
Raised when a string is matched a reserved name.
"""
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
class InvalidNameError(ValueError):
"""
Base class of invalid name error.
"""
class NullNameError(InvalidNameError):
"""
Raised when a name is empty.
"""
class InvalidCharError(InvalidNameError):
"""
Raised when includes invalid character(s) within a string.
"""
class InvalidCharWindowsError(InvalidCharError):
"""
Raised when includes Windows specific invalid character(s) within a string.
"""
class InvalidLengthError(InvalidNameError):
"""
Raised when a string too long/short.
"""
class ReservedNameError(InvalidNameError):
"""
Raised when a string is matched a reserved name.
"""
|
Add base class of invalid name error
|
Add base class of invalid name error
|
Python
|
mit
|
thombashi/pathvalidate
|
---
+++
@@ -8,13 +8,19 @@
from __future__ import unicode_literals
-class NullNameError(ValueError):
+class InvalidNameError(ValueError):
+ """
+ Base class of invalid name error.
+ """
+
+
+class NullNameError(InvalidNameError):
"""
Raised when a name is empty.
"""
-class InvalidCharError(ValueError):
+class InvalidCharError(InvalidNameError):
"""
Raised when includes invalid character(s) within a string.
"""
@@ -26,13 +32,13 @@
"""
-class InvalidLengthError(ValueError):
+class InvalidLengthError(InvalidNameError):
"""
Raised when a string too long/short.
"""
-class ReservedNameError(ValueError):
+class ReservedNameError(InvalidNameError):
"""
Raised when a string is matched a reserved name.
"""
|
46fb576c9dcf83c40b67c75dade5b43a4d122e7f
|
platformio/__init__.py
|
platformio/__init__.py
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
VERSION = (0, 7, "0.dev")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = ("A cross-platform code builder and library manager")
__url__ = "https://github.com/ivankravets/platformio"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014 Ivan Kravets"
# __apiurl__ = "http://127.0.0.1:8080"
__apiurl__ = "http://api.platformio.ikravets.com"
__pkgmanifesturl__ = "http://platformio.ikravets.com/packages/manifest.json"
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
VERSION = (0, 7, "0.dev")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = ("A cross-platform code builder and library manager")
__url__ = "https://github.com/ivankravets/platformio"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014 Ivan Kravets"
# __apiurl__ = "http://127.0.0.1:8080"
__apiurl__ = "http://api.platformio.ikravets.com"
__pkgmanifesturl__ = "http://dl.platformio.ikravets.com/packages/manifest.json"
|
Switch package manifest to dl.platformio
|
Switch package manifest to dl.platformio
|
Python
|
mit
|
aphelps/platformio,mcanthony/platformio,TimJay/platformio,aphelps/platformio,valeros/platformio,jrobeson/platformio,TimJay/platformio,bkudria/platformio,platformio/platformio-core,jrobeson/platformio,bkudria/platformio,ZachMassia/platformio,mplewis/platformio,mseroczynski/platformio,bkudria/platformio,aphelps/platformio,TimJay/platformio,bkudria/platformio,eiginn/platformio,atyenoria/platformio,dkuku/platformio,TimJay/platformio,TimJay/platformio,awong1900/platformio,platformio/platformio,aphelps/platformio,awong1900/platformio,jrobeson/platformio,platformio/platformio-core,jrobeson/platformio,awong1900/platformio
|
---
+++
@@ -16,4 +16,4 @@
# __apiurl__ = "http://127.0.0.1:8080"
__apiurl__ = "http://api.platformio.ikravets.com"
-__pkgmanifesturl__ = "http://platformio.ikravets.com/packages/manifest.json"
+__pkgmanifesturl__ = "http://dl.platformio.ikravets.com/packages/manifest.json"
|
752e6cef31ea124f00eced5699fb225501258148
|
reversible.py
|
reversible.py
|
#!/usr/bin/env python
""" Some tools for dealing with reversible numbers for problem 145 from Project Euler.
https://projecteuler.net/problem=145
"""
def is_odd(num):
""" Check if an integer is odd. """
if num % 2 != 0:
return True
else:
return False
def is_reversible(num):
""" Check if a number is reversible given the above definition. """
num_str = str(num)
rev_num = int("".join(reversed(num_str)))
total = num + rev_num
for digit in str(total):
if not is_odd(int(digit)):
return False
return True
if __name__ == "__main__":
# check some odd and even numbers
assert is_odd(1), "1 should be odd"
assert not is_odd(2), "2 should not be odd"
assert not is_odd(100), "100 should not be odd"
assert is_odd(10001), "10001 should be odd"
# check the example reversible numbers
assert is_reversible(36), "36 should be reversible"
assert is_reversible(63), "63 should be reversible"
assert is_reversible(409), "409 should be reversible"
assert is_reversible(904), "904 should be reversible"
print "all assertions passed"
|
#!/usr/bin/env python
""" Some tools for dealing with reversible numbers for problem 145 from Project Euler.
https://projecteuler.net/problem=145
"""
def is_odd(num):
""" Check if an integer is odd. """
if num % 2 != 0:
return True
else:
return False
def is_reversible(num):
""" Check if a number is reversible given the above definition. """
num_str = str(num)
rev_str = "".join(reversed(num_str))
if int(rev_str[0]) == 0:
return False
total = num + int(rev_str)
for digit in str(total):
if not is_odd(int(digit)):
return False
return True
if __name__ == "__main__":
# check some odd and even numbers
assert is_odd(1), "1 should be odd"
assert not is_odd(2), "2 should not be odd"
assert not is_odd(100), "100 should not be odd"
assert is_odd(10001), "10001 should be odd"
# check the example reversible numbers
assert is_reversible(36), "36 should be reversible"
assert is_reversible(63), "63 should be reversible"
assert is_reversible(409), "409 should be reversible"
assert is_reversible(904), "904 should be reversible"
assert not is_reversible(10), "10 should not be reversible. (leading zero.)"
print "all assertions passed"
|
Add check for leading zeroes.
|
Add check for leading zeroes.
|
Python
|
mit
|
smillet15/project-euler
|
---
+++
@@ -16,9 +16,12 @@
def is_reversible(num):
""" Check if a number is reversible given the above definition. """
num_str = str(num)
- rev_num = int("".join(reversed(num_str)))
+ rev_str = "".join(reversed(num_str))
- total = num + rev_num
+ if int(rev_str[0]) == 0:
+ return False
+
+ total = num + int(rev_str)
for digit in str(total):
if not is_odd(int(digit)):
@@ -40,4 +43,6 @@
assert is_reversible(409), "409 should be reversible"
assert is_reversible(904), "904 should be reversible"
+ assert not is_reversible(10), "10 should not be reversible. (leading zero.)"
+
print "all assertions passed"
|
3ffc101a1a8b1ec17e5f2e509a1e5182a1f6f4b9
|
fzn/utils.py
|
fzn/utils.py
|
import subprocess as sp
import signal
import threading
import os
SIGTERM_TIMEOUT = 1.0
class Command(object):
def __init__(self, cmd, memlimit=None):
self.cmd = cmd
self.memlimit = memlimit
self.process = None
self.stdout = None
self.stderr = None
self.exitcode = None
self.timed_out = False
def run(self, timeout=None):
def target():
self.process = sp.Popen(self.cmd,
stdout=sp.PIPE, stderr=sp.PIPE,
shell=True, preexec_fn=os.setpgrp)
self.stdout, self.stderr = self.process.communicate()
self.exitcode = self.process.returncode
thread = threading.Thread(target=target)
thread.start()
thread.join(float(timeout))
if thread.is_alive():
self.timed_out = True
# Send the TERM signal to all the process groups
os.killpg(self.process.pid, signal.SIGTERM)
thread.join(SIGTERM_TIMEOUT)
if thread.is_alive():
# Send the KILL signal if the process hasn't exited by now.
os.killpg(self.process.pid, signal.SIGKILL)
self.process.kill()
thread.join()
|
import subprocess as sp
import signal
import threading
import os
SIGTERM_TIMEOUT = 1.0
class Command(object):
def __init__(self, cmd, memlimit=None):
self.cmd = cmd
self.memlimit = memlimit
self.process = None
self.stdout = None
self.stderr = None
self.exitcode = None
self.timed_out = False
def run(self, timeout=None):
def target():
cmd = self.cmd
if self.memlimit:
cmd = "ulimit -v %d; %s" % (self.memlimit, cmd)
self.process = sp.Popen(cmd,
stdout=sp.PIPE, stderr=sp.PIPE,
shell=True, preexec_fn=os.setpgrp)
self.stdout, self.stderr = self.process.communicate()
self.exitcode = self.process.returncode
thread = threading.Thread(target=target)
thread.start()
thread.join(float(timeout))
if thread.is_alive():
self.timed_out = True
# Send the TERM signal to all the process groups
os.killpg(self.process.pid, signal.SIGTERM)
thread.join(SIGTERM_TIMEOUT)
if thread.is_alive():
# Send the KILL signal if the process hasn't exited by now.
os.killpg(self.process.pid, signal.SIGKILL)
self.process.kill()
thread.join()
|
Enable Command to support memory limiting.
|
Enable Command to support memory limiting.
|
Python
|
lgpl-2.1
|
eomahony/Numberjack,eomahony/Numberjack,eomahony/Numberjack,JElchison/Numberjack,JElchison/Numberjack,JElchison/Numberjack,JElchison/Numberjack,JElchison/Numberjack,eomahony/Numberjack,eomahony/Numberjack
|
---
+++
@@ -19,7 +19,10 @@
def run(self, timeout=None):
def target():
- self.process = sp.Popen(self.cmd,
+ cmd = self.cmd
+ if self.memlimit:
+ cmd = "ulimit -v %d; %s" % (self.memlimit, cmd)
+ self.process = sp.Popen(cmd,
stdout=sp.PIPE, stderr=sp.PIPE,
shell=True, preexec_fn=os.setpgrp)
self.stdout, self.stderr = self.process.communicate()
|
a89a61620306d3cc38062cf69c56db64aadf0a8d
|
pokedex/db/__init__.py
|
pokedex/db/__init__.py
|
import pkg_resources
from sqlalchemy import MetaData, Table, create_engine, orm
from .tables import metadata
def connect(uri=None, **kwargs):
"""Connects to the requested URI. Returns a session object.
With the URI omitted, attempts to connect to a default SQLite database
contained within the package directory.
Calling this function also binds the metadata object to the created engine.
"""
# Default to a URI within the package, which was hopefully created at some point
if not uri:
sqlite_path = pkg_resources.resource_filename('pokedex',
'data/pokedex.sqlite')
uri = 'sqlite:///' + sqlite_path
### Do some fixery for MySQL
if uri[0:5] == 'mysql':
# MySQL uses latin1 for connections by default even if the server is
# otherwise oozing with utf8; charset fixes this
if 'charset' not in uri:
uri += '?charset=utf8'
# Tables should be InnoDB, in the event that we're creating them, and
# use UTF-8 goddammit!
for table in metadata.tables.values():
table.kwargs['mysql_engine'] = 'InnoDB'
table.kwargs['mysql_charset'] = 'utf8'
### Connect
engine = create_engine(uri)
conn = engine.connect()
metadata.bind = engine
session_args = dict(autoflush=True, autocommit=False, bind=engine)
session_args.update(kwargs)
sm = orm.sessionmaker(**session_args)
session = orm.scoped_session(sm)
return session
|
import pkg_resources
from sqlalchemy import MetaData, Table, create_engine, orm
from .tables import metadata
def connect(uri=None, session_args=None, engine_args=None):
    """Connect to the requested URI and return a scoped session object.

    With the URI omitted, attempts to connect to a default SQLite database
    contained within the package directory.

    Calling this function also binds the metadata object to the created
    engine.

    session_args -- extra keyword arguments for sqlalchemy's sessionmaker().
    engine_args  -- extra keyword arguments for sqlalchemy's create_engine().
    """
    # None sentinels instead of mutable {} defaults: a shared dict default
    # is created once at definition time and could leak state across calls.
    session_args = session_args if session_args is not None else {}
    engine_args = engine_args if engine_args is not None else {}

    # Default to a URI within the package, which was hopefully created at
    # some point
    if not uri:
        sqlite_path = pkg_resources.resource_filename('pokedex',
                                                      'data/pokedex.sqlite')
        uri = 'sqlite:///' + sqlite_path

    ### Do some fixery for MySQL
    if uri[0:5] == 'mysql':
        # MySQL uses latin1 for connections by default even if the server is
        # otherwise oozing with utf8; charset fixes this
        if 'charset' not in uri:
            uri += '?charset=utf8'

        # Tables should be InnoDB, in the event that we're creating them, and
        # use UTF-8 goddammit!
        for table in metadata.tables.values():
            table.kwargs['mysql_engine'] = 'InnoDB'
            table.kwargs['mysql_charset'] = 'utf8'

    ### Connect
    engine = create_engine(uri, **engine_args)
    # NOTE(review): this eagerly opens (and never closes) a connection;
    # presumably a fail-fast connectivity check -- confirm before removing.
    conn = engine.connect()
    metadata.bind = engine

    all_session_args = dict(autoflush=True, autocommit=False, bind=engine)
    all_session_args.update(session_args)
    sm = orm.sessionmaker(**all_session_args)
    session = orm.scoped_session(sm)

    return session
|
Allow passing engine arguments to connect().
|
Allow passing engine arguments to connect().
|
Python
|
mit
|
mschex1/pokedex,RK905/pokedex-1,xfix/pokedex,veekun/pokedex,DaMouse404/pokedex,veekun/pokedex
|
---
+++
@@ -4,7 +4,7 @@
from .tables import metadata
-def connect(uri=None, **kwargs):
+def connect(uri=None, session_args={}, engine_args={}):
"""Connects to the requested URI. Returns a session object.
With the URI omitted, attempts to connect to a default SQLite database
@@ -33,13 +33,13 @@
table.kwargs['mysql_charset'] = 'utf8'
### Connect
- engine = create_engine(uri)
+ engine = create_engine(uri, **engine_args)
conn = engine.connect()
metadata.bind = engine
- session_args = dict(autoflush=True, autocommit=False, bind=engine)
- session_args.update(kwargs)
- sm = orm.sessionmaker(**session_args)
+ all_session_args = dict(autoflush=True, autocommit=False, bind=engine)
+ all_session_args.update(session_args)
+ sm = orm.sessionmaker(**all_session_args)
session = orm.scoped_session(sm)
return session
|
9d5abdaefa483574cdd81da8d8d4e63ef68f5ab8
|
crossfolium/__init__.py
|
crossfolium/__init__.py
|
# -*- coding: utf-8 -*-
"""
Crossfolium
-----------
"""
import crossfolium.marker_function as marker_function
from crossfolium.crossfolium import (
Crossfilter,
PieFilter,
RowBarFilter,
BarFilter,
TableFilter,
CountFilter,
ResetFilter,
GeoChoroplethFilter,
)
from .map import (
FeatureGroupFilter,
HeatmapFilter,
)
__version__ = "0.0.0"
__all__ = [
'__version__',
'marker_function',
'Crossfilter',
'PieFilter',
'RowBarFilter',
'BarFilter',
'FeatureGroupFilter',
'TableFilter',
'CountFilter',
'ResetFilter',
'HeatmapFilter',
'GeoChoroplethFilter',
]
|
# -*- coding: utf-8 -*-
"""
Crossfolium
-----------
"""
from __future__ import absolute_import
from crossfolium import marker_function
from crossfolium.crossfolium import (
Crossfilter,
PieFilter,
RowBarFilter,
BarFilter,
TableFilter,
CountFilter,
ResetFilter,
GeoChoroplethFilter,
)
from crossfolium.map import (
FeatureGroupFilter,
HeatmapFilter,
)
__version__ = "0.0.0"
__all__ = [
'__version__',
'marker_function',
'Crossfilter',
'PieFilter',
'RowBarFilter',
'BarFilter',
'FeatureGroupFilter',
'TableFilter',
'CountFilter',
'ResetFilter',
'HeatmapFilter',
'GeoChoroplethFilter',
]
|
Handle absolute import for py27
|
Handle absolute import for py27
|
Python
|
mit
|
BibMartin/crossfolium,BibMartin/crossfolium
|
---
+++
@@ -4,8 +4,9 @@
-----------
"""
+from __future__ import absolute_import
-import crossfolium.marker_function as marker_function
+from crossfolium import marker_function
from crossfolium.crossfolium import (
Crossfilter,
@@ -18,7 +19,7 @@
GeoChoroplethFilter,
)
-from .map import (
+from crossfolium.map import (
FeatureGroupFilter,
HeatmapFilter,
)
|
e325c603e972e6e7cd50eefae23b94594b6c9751
|
Tables/build_db.py
|
Tables/build_db.py
|
import sqlite3
import os
import pandas as pd
TABLES = [['Natures', 'nature'],
['Experience'],
]
PATH = os.path.dirname(__file__)+"/"
CONNECTION = sqlite3.connect(PATH + 'serpyrior.db')
# insert a little jimmy drop tables here
for table in TABLES:
table_name = table[0]
print(table_name)
try:
table_index = table[1]
write_index = False
except IndexError:
table_index = None
write_index = True
df = pd.read_csv(PATH + table_name + '.csv')
df.to_sql(table_name, CONNECTION, index=write_index, index_label=table_index)
CONNECTION.commit()
CONNECTION.close()
# cur = conn.cursor()
# cur.execute("CREATE TABLE IF NOT EXISTS natures()")
# filename.encode('utf-8')
# with open(filename) as f:
# reader = csv.reader(f)
# for field in reader:
# cur.execute("INSERT INTO natures VALUES (?,?,?,?,?,?,?);", field)
#
# conn.commit()
#
# df = pd.read_sql_query("SELECT * FROM natures", conn, index_col='nature')
#
# print(df.head(25))
# conn.close()
|
import sqlite3
import os
import pandas as pd

# Each entry is [csv base name] or [csv base name, index column name].
# When an index column is named, that CSV column becomes the SQL index;
# otherwise pandas writes its default integer index.
TABLES = [['Natures', 'nature'],
          ['Experience'],
          ]

# Resolve paths relative to this script so it works from any CWD.
PATH = os.path.dirname(__file__)+"/"

try:  # Little Bobby Tables -- rebuild the database from scratch each run
    os.remove(PATH + 'serpyrior.db')
except FileNotFoundError:
    pass

CONNECTION = sqlite3.connect(PATH + 'serpyrior.db')

for table in TABLES:
    table_name = table[0]
    print(table_name)

    # EAFP: a one-element entry has no explicit index column.
    try:
        table_index = table[1]
        write_index = False
    except IndexError:
        table_index = None
        write_index = True

    df = pd.read_csv(PATH + table_name + '.csv')
    df.to_sql(table_name, CONNECTION, index=write_index, index_label=table_index)

CONNECTION.commit()
CONNECTION.close()
|
Remove db if it already exists
|
Remove db if it already exists
|
Python
|
mit
|
Ditoeight/Pyranitar
|
---
+++
@@ -7,10 +7,14 @@
]
PATH = os.path.dirname(__file__)+"/"
+
+
+try: # Little Bobby Tables
+ os.remove(PATH + 'serpyrior.db')
+except FileNotFoundError:
+ pass
+
CONNECTION = sqlite3.connect(PATH + 'serpyrior.db')
-
-# insert a little jimmy drop tables here
-
for table in TABLES:
table_name = table[0]
@@ -27,18 +31,3 @@
CONNECTION.commit()
CONNECTION.close()
-
-# cur = conn.cursor()
-# cur.execute("CREATE TABLE IF NOT EXISTS natures()")
-# filename.encode('utf-8')
-# with open(filename) as f:
-# reader = csv.reader(f)
-# for field in reader:
-# cur.execute("INSERT INTO natures VALUES (?,?,?,?,?,?,?);", field)
-#
-# conn.commit()
-#
-# df = pd.read_sql_query("SELECT * FROM natures", conn, index_col='nature')
-#
-# print(df.head(25))
-# conn.close()
|
55a8921f3634fe842eddf202d1237f53ca6d003b
|
kobo/settings/dev.py
|
kobo/settings/dev.py
|
# coding: utf-8
from .base import *
LOGGING['handlers']['console'] = {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
INSTALLED_APPS = INSTALLED_APPS + ('debug_toolbar',)
MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')
# Comment out the line below to use `Django Debug Toolbar`
# INTERNAL_IPS = ['172.28.0.4'] # Change IP to KPI container's IP
ENV = 'dev'
# Expiration time in sec. after which paired data xml file must be regenerated
# Does not need to match KoBoCAT setting
PAIRED_DATA_EXPIRATION = 5
# Minimum size (in bytes) of files to allow fast calculation of hashes
# Should match KoBoCAT setting
HASH_BIG_FILE_SIZE_THRESHOLD = 200 * 1024 # 200 kB
# Chunk size in bytes to read per iteration when hash of a file is calculated
# Should match KoBoCAT setting
HASH_BIG_FILE_CHUNK = 5 * 1024 # 5 kB
|
# coding: utf-8
from .base import *
LOGGING['handlers']['console'] = {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
INSTALLED_APPS = INSTALLED_APPS + ('debug_toolbar',)
MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')
def show_toolbar(request):
    """Return True when the DEBUG_TOOLBAR env var is truthy.

    Used as django-debug-toolbar's SHOW_TOOLBAR_CALLBACK so the toolbar
    can be toggled per environment without editing settings.
    """
    # NOTE(review): `env` is presumably provided by `from .base import *`
    # -- confirm it is among base's exported names.
    return env.bool("DEBUG_TOOLBAR", False)


DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": show_toolbar}
ENV = 'dev'
# Expiration time in sec. after which paired data xml file must be regenerated
# Does not need to match KoBoCAT setting
PAIRED_DATA_EXPIRATION = 5
# Minimum size (in bytes) of files to allow fast calculation of hashes
# Should match KoBoCAT setting
HASH_BIG_FILE_SIZE_THRESHOLD = 200 * 1024 # 200 kB
# Chunk size in bytes to read per iteration when hash of a file is calculated
# Should match KoBoCAT setting
HASH_BIG_FILE_CHUNK = 5 * 1024 # 5 kB
|
Enable django debug toolbar via env var
|
Enable django debug toolbar via env var
|
Python
|
agpl-3.0
|
kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi
|
---
+++
@@ -10,8 +10,9 @@
INSTALLED_APPS = INSTALLED_APPS + ('debug_toolbar',)
MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')
-# Comment out the line below to use `Django Debug Toolbar`
-# INTERNAL_IPS = ['172.28.0.4'] # Change IP to KPI container's IP
+def show_toolbar(request):
+ return env.bool("DEBUG_TOOLBAR", False)
+DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": show_toolbar}
ENV = 'dev'
|
80fb36f2e8754a07ae2f6f4b454862a8b1852763
|
dadd/worker/handlers.py
|
dadd/worker/handlers.py
|
import json
import requests
from flask import request, jsonify
from dadd.worker import app
from dadd.worker.proc import ChildProcess
@app.route('/run/', methods=['POST'])
def run_process():
proc = ChildProcess(request.json)
proc.run()
return jsonify(proc.info())
@app.route('/register/', methods=['POST'])
def register_with_master():
register(app)
return jsonify({'message': 'ok'})
def register(host, port):
sess = requests.Session()
if 'USERNAME' in app.config and 'PASSWORD' in app.config:
sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
try:
url = app.config['MASTER_URL'] + '/api/hosts/'
resp = sess.post(url, data=json.dumps({
'host': host, 'port': port
}))
if not resp.ok:
app.logger.warning('Error registering with master: %s' %
app.config['MASTER_URL'])
except Exception as e:
app.logger.warning('Connection Error: %s' % e)
|
import json
import socket
import requests
from flask import request, jsonify
from dadd.worker import app
from dadd.worker.proc import ChildProcess
@app.route('/run/', methods=['POST'])
def run_process():
    """Spawn a child process described by the POSTed JSON body.

    Returns the child process's info dict as JSON.
    """
    proc = ChildProcess(request.json)
    proc.run()
    return jsonify(proc.info())
@app.route('/register/', methods=['POST'])
def register_with_master():
    """Trigger registration of this worker with the master."""
    # NOTE(review): register() is defined below as register(host, port);
    # calling it with the Flask app as the sole argument looks like a
    # latent TypeError -- confirm the intended call signature.
    register(app)
    return jsonify({'message': 'ok'})
def register(host, port):
    """Register this worker with the master at app.config['MASTER_URL'].

    host -- unused since registration switched to socket.getfqdn(); kept
            for backward compatibility with existing callers.
    port -- port this worker listens on, reported to the master.

    Failures are logged as warnings rather than raised: registration is
    best-effort.
    """
    sess = requests.Session()

    # Optional HTTP basic auth, taken from the app config when present.
    if 'USERNAME' in app.config and 'PASSWORD' in app.config:
        sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])

    sess.headers = {'content-type': 'application/json'}

    try:
        url = app.config['MASTER_URL'] + '/api/hosts/'
        resp = sess.post(url, data=json.dumps({
            'host': socket.getfqdn(), 'port': port
        }))
        if not resp.ok:
            app.logger.warning('Error registering with master: %s' %
                               app.config['MASTER_URL'])
    except Exception as e:
        app.logger.warning('Connection Error: %s' % e)
|
Use the fqdn when registering with the master.
|
Use the fqdn when registering with the master.
Not all deployment systems will provide a specific hostname via an env
var so we'll avoid relying on it by asking the machine.
|
Python
|
bsd-3-clause
|
ionrock/dadd,ionrock/dadd,ionrock/dadd,ionrock/dadd
|
---
+++
@@ -1,4 +1,5 @@
import json
+import socket
import requests
@@ -31,7 +32,7 @@
try:
url = app.config['MASTER_URL'] + '/api/hosts/'
resp = sess.post(url, data=json.dumps({
- 'host': host, 'port': port
+ 'host': socket.getfqdn(), 'port': port
}))
if not resp.ok:
app.logger.warning('Error registering with master: %s' %
|
2a76368054599006c8f7833cda1ec20f85bfcb28
|
hash_table.py
|
hash_table.py
|
#!/usr/bin/env python
'''Implementation of a simple hash table.
The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert the value
into the table.
'''
class HashItem(object):
def __init__(self):
pass
class Hash(object):
def __init__(self, size=1024):
self.table = []
for i in range(size):
self.table.append(list())
def hash(self):
pass
def get(self):
pass
def set(self):
pass
|
#!/usr/bin/env python
'''Implementation of a simple hash table.
The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert the value
into the table.
'''
class HashItem(object):
    # Placeholder for a future key/value entry type; currently unused.
    def __init__(self):
        pass
class Hash(object):
    """Simple hash table backed by a list of buckets (separate chaining)."""

    def __init__(self, size=1024):
        # One empty bucket list per slot.
        self.table = []
        for i in range(size):
            self.table.append(list())

    def hash(self, key):
        """Return the bucket index for *key* (sum of character ordinals
        modulo the table size).

        key -- an iterable of single characters (e.g. a string).
        """
        hash_value = 0
        for char in key:
            # Bug fix: the original added ord(key) -- the whole string --
            # which raises TypeError for any multi-character key.
            hash_value += ord(char)
        return hash_value % len(self.table)

    def get(self):
        # Not implemented yet.
        pass

    def set(self):
        # Not implemented yet.
        pass
|
Build out hash function of hash table class
|
Build out hash function of hash table class
|
Python
|
mit
|
jwarren116/data-structures-deux
|
---
+++
@@ -18,8 +18,11 @@
for i in range(size):
self.table.append(list())
- def hash(self):
- pass
+ def hash(self, key):
+ hash_value = 0
+ for i in key:
+ hash_value += ord(key)
+ return hash_value % len(self.table)
def get(self):
pass
|
b0824da73317bae42cb39fad5cfc95574548594a
|
accounts/models.py
|
accounts/models.py
|
# coding: utf-8
from __future__ import unicode_literals
from django.contrib.auth.models import AbstractUser, UserManager
from django.db.models import BooleanField
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ungettext_lazy
from mptt.fields import TreeForeignKey
from mptt.managers import TreeManager
from mptt.models import MPTTModel
from cache_tools import cached_ugettext_lazy as _
class HierarchicUserManager(TreeManager, UserManager):
pass
@python_2_unicode_compatible
class HierarchicUser(MPTTModel, AbstractUser):
mentor = TreeForeignKey(
'self', null=True, blank=True, related_name='disciples',
verbose_name=_('mentor'),
limit_choices_to={'willing_to_be_mentor__exact': True})
willing_to_be_mentor = BooleanField(
_('Veut être mentor'), default=False)
objects = HierarchicUserManager()
class MPTTMeta(object):
parent_attr = 'mentor'
order_insertion_by = ('username',)
class Meta(object):
verbose_name = ungettext_lazy('utilisateur', 'utilisateurs', 1)
verbose_name_plural = ungettext_lazy('utilisateur', 'utilisateurs', 2)
def __str__(self):
return self.get_full_name() or self.get_username()
|
# coding: utf-8
from __future__ import unicode_literals
from django.contrib.auth.models import AbstractUser, UserManager
from django.db.models import BooleanField
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ungettext_lazy
from mptt.fields import TreeForeignKey
from mptt.managers import TreeManager
from mptt.models import MPTTModel
from cache_tools import cached_ugettext_lazy as _
class HierarchicUserManager(TreeManager, UserManager):
pass
@python_2_unicode_compatible
class HierarchicUser(MPTTModel, AbstractUser):
    # Self-referential mentor/disciple tree; only users who opted in via
    # willing_to_be_mentor may be chosen as a mentor.
    mentor = TreeForeignKey(
        'self', null=True, blank=True, related_name='disciples',
        verbose_name=_('mentor'),
        limit_choices_to={'willing_to_be_mentor__exact': True})
    willing_to_be_mentor = BooleanField(
        _('Veut être mentor'), default=False)

    objects = HierarchicUserManager()

    class MPTTMeta(object):
        # The MPTT parent link is the mentor field; siblings are kept
        # sorted by last name, first name, then username.
        parent_attr = 'mentor'
        order_insertion_by = ('last_name', 'first_name', 'username')

    class Meta(object):
        verbose_name = ungettext_lazy('utilisateur', 'utilisateurs', 1)
        verbose_name_plural = ungettext_lazy('utilisateur', 'utilisateurs', 2)

    def __str__(self):
        # Fall back to the username when no full name is set.
        return self.get_full_name() or self.get_username()
|
Change l'ordre d'insertion des utilisateurs.
|
Change l'ordre d'insertion des utilisateurs.
|
Python
|
bsd-3-clause
|
dezede/dezede,dezede/dezede,dezede/dezede,dezede/dezede
|
---
+++
@@ -28,7 +28,7 @@
class MPTTMeta(object):
parent_attr = 'mentor'
- order_insertion_by = ('username',)
+ order_insertion_by = ('last_name', 'first_name', 'username')
class Meta(object):
verbose_name = ungettext_lazy('utilisateur', 'utilisateurs', 1)
|
c568c4b51f7a20524fb8c5a6184b8fdacb25d613
|
src/lib/ghostlines/windows/account_window.py
|
src/lib/ghostlines/windows/account_window.py
|
from ghostlines.storage.app_storage import AppStorage
from ghostlines.windows.account_details_window import AccountDetailsWindow
from ghostlines.windows.sign_in_window import SignInWindow
class AccountWindow(object):
def __init__(self, sign_in=SignInWindow, account_details=AccountDetailsWindow):
if self.is_logged_in:
self.window = account_details(logout_window=sign_in)
else:
self.window = sign_in(success_window=account_details)
@property
def is_logged_in(self):
token = AppStorage("pm.ghostlines.ghostlines.access_token").retrieve()
# TODO: Retrieve returns NSNull if set to None. Empty string used to clear password for now.
return token != '' and token is not None
def open(self):
self.window.open()
AccountWindow().open()
|
from ghostlines.storage.app_storage import AppStorage
from ghostlines.windows.account_details_window import AccountDetailsWindow
from ghostlines.windows.sign_in_window import SignInWindow
class AccountWindow(object):
    """Open the account-details window when a token is stored, otherwise
    the sign-in window."""

    def __init__(self, sign_in=SignInWindow, account_details=AccountDetailsWindow):
        # Route to the appropriate window based on stored credentials.
        self.window = (account_details(logout_window=sign_in)
                       if self.is_logged_in
                       else sign_in(success_window=account_details))

    @property
    def is_logged_in(self):
        """True when a non-empty access token is stored."""
        storage = AppStorage("pm.ghostlines.ghostlines.access_token")
        token = storage.retrieve()
        # TODO: Retrieve returns NSNull if set to None. Empty string used to clear password for now.
        return not (token == '' or token is None)

    def open(self):
        self.window.open()
|
Remove call to open account window when loading module
|
Remove call to open account window when loading module
|
Python
|
mit
|
ghostlines/ghostlines-robofont,ghostlines/ghostlines-robofont,ghostlines/ghostlines-robofont,ghostlines/ghostlines-robofont
|
---
+++
@@ -18,6 +18,3 @@
def open(self):
self.window.open()
-
-
-AccountWindow().open()
|
9ffc56e947dea40cd49c76beada2ec469a01f8f8
|
__init__.py
|
__init__.py
|
import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = '{}\{}'.format(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
|
import base64
import json
from os import path
import sys

# Make the parent directory importable when this package is used directly.
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))

# API credentials live in a JSON file next to this module; path.join keeps
# the path OS-safe (no hard-coded separators).
api_file = 'my_api.json'
_api_file = path.join(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
    cw_api_settings = json.load(fin)

API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']

# HTTP Basic auth header: base64("companyid+publickey:privatekey").
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
|
Make api file path OS safe
|
Make api file path OS safe
|
Python
|
mit
|
joshuamsmith/ConnectPyse
|
---
+++
@@ -6,7 +6,7 @@
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
-_api_file = '{}\{}'.format(path.dirname(path.abspath(__file__)), api_file)
+_api_file = path.join(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
|
f99847f363eed36713f657a4cb15a103ffcc6623
|
web/server.py
|
web/server.py
|
import http.client
import os
from flask import Flask
from pymongo import MongoClient
MONGO_URL = os.environ.get('MONGO_URL', 'mongodb://mongo:27017/')
MONGO_DATABASE = os.environ.get('MONGO_DATABASE', 'whistleblower')
DATABASE = MongoClient(MONGO_URL)[MONGO_DATABASE]
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/facebook_webhook', methods=['POST'])
def facebook_webhook():
DATABASE.facebook_webhook.insert(request.form)
return ('', http.client.NO_CONTENT)
|
import http.client
import os

from flask import Flask, request
from pymongo import MongoClient

# Mongo connection settings, overridable via environment variables.
MONGO_URL = os.environ.get('MONGO_URL', 'mongodb://mongo:27017/')
MONGO_DATABASE = os.environ.get('MONGO_DATABASE', 'whistleblower')
DATABASE = MongoClient(MONGO_URL)[MONGO_DATABASE]
app = Flask(__name__)


@app.route('/')
def hello_world():
    return 'Hello, World!'


# @app.route('/facebook_webhook', methods=['POST'])
@app.route('/facebook_webhook')
def facebook_webhook():
    """Persist the incoming webhook request's form payload and reply 204."""
    # Bug fix: `request` was referenced but never imported from flask, so
    # this view raised NameError on every call.
    DATABASE.facebook_webhook.insert(request.form)
    return ('', http.client.NO_CONTENT)
|
Save every request coming in the facebook webroot endpoint
|
Save every request coming in the facebook webroot endpoint
|
Python
|
unlicense
|
datasciencebr/whistleblower
|
---
+++
@@ -15,7 +15,8 @@
return 'Hello, World!'
-@app.route('/facebook_webhook', methods=['POST'])
+# @app.route('/facebook_webhook', methods=['POST'])
+@app.route('/facebook_webhook')
def facebook_webhook():
DATABASE.facebook_webhook.insert(request.form)
return ('', http.client.NO_CONTENT)
|
483cf7f91a89e040184bd71a0a1c59c0e0926e34
|
elasticmapping/types.py
|
elasticmapping/types.py
|
# ElasticMapping
# File: types.py
# Desc: base Elasticsearch types
class CallableDict(dict):
BASE = None
OVERRIDES = None
def __call__(self, overrides):
new_dict = CallableDict(self)
new_dict.OVERRIDES = overrides
new_dict.BASE = self
return new_dict
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
DATETIME = CallableDict({
'type': 'date',
'format': 'date_hour_minute_second_fraction'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
|
# ElasticMapping
# File: types.py
# Desc: base Elasticsearch types
class CallableDict(dict):
    """Dict that can be "called" with overrides to derive a child mapping.

    Calling an instance returns a shallow copy that records both the dict
    it was derived from (BASE) and the overrides it was given (OVERRIDES).
    """

    BASE = None
    OVERRIDES = None

    def __call__(self, overrides):
        derived = CallableDict(self)
        derived.BASE = self
        derived.OVERRIDES = overrides
        return derived
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
TIME = CallableDict({
'type': 'date',
'format': 'time'
})
DATETIME = CallableDict({
'type': 'date',
'format': 'date_optional_time'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
|
Switch default to actual ES default (date_optional_time) and add TIME type
|
Switch default to actual ES default (date_optional_time) and add TIME type
|
Python
|
mit
|
Fizzadar/ElasticMapping,Fizzadar/ElasticMapping
|
---
+++
@@ -57,9 +57,14 @@
'format': 'date'
})
+TIME = CallableDict({
+ 'type': 'date',
+ 'format': 'time'
+})
+
DATETIME = CallableDict({
'type': 'date',
- 'format': 'date_hour_minute_second_fraction'
+ 'format': 'date_optional_time'
})
|
b3bae8e48618e487ce9c8a90a555d5c6d6664872
|
app/management/commands/cleanapptables.py
|
app/management/commands/cleanapptables.py
|
from django.core.management.base import BaseCommand, CommandError
from app.models import Author, Location, AutoComment, Comment, Idea, Vote
class Command(BaseCommand):
def handle(self, *args, **options):
self.stdout.write('Starting to clean app tables...')
try:
Idea.objects.all().delete()
self.stdout.write('Ideas deleted')
Comment.objects.all().delete()
self.stdout.write('Comments deleted')
Vote.objects.all().delete()
self.stdout.write('Votes deleted')
Location.objects.all().delete()
self.stdout.write('Locations deleted')
Author.objects.all().delete()
self.stdout.write('Authors deleted')
AutoComment.objects.all().delete()
self.stdout.write('Automatic Comments deleted')
except Exception as e:
raise CommandError('The cleaning procedure couldn\'t finished. Error {}'.format(e))
self.stdout.write('The procedure has finished successfully...')
|
from django.core.management.base import BaseCommand, CommandError
from app.models import Author, Location, AutoComment, Comment, Idea, Vote, SocialNetworkAppUser
class Command(BaseCommand):
    """Management command that wipes all app tables."""

    def handle(self, *args, **options):
        self.stdout.write('Starting to clean app tables...')
        try:
            # NOTE(review): deletion order presumably matters for related
            # objects (children before parents) -- confirm before reordering.
            Idea.objects.all().delete()
            self.stdout.write('Ideas were deleted')
            Comment.objects.all().delete()
            self.stdout.write('Comments were deleted')
            Vote.objects.all().delete()
            self.stdout.write('Votes were deleted')
            Location.objects.all().delete()
            self.stdout.write('Locations were deleted')
            Author.objects.all().delete()
            self.stdout.write('Authors were deleted')
            AutoComment.objects.all().delete()
            self.stdout.write('Automatic Comments were deleted')
            SocialNetworkAppUser.objects.all().delete()
            self.stdout.write('App users were deleted')
        except Exception as e:
            # Message fix: "couldn't finished" -> "couldn't finish".
            raise CommandError('The cleaning procedure couldn\'t finish. Error {}'.format(e))
        self.stdout.write('The procedure has finished successfully...')
|
Add the deletion of the app users
|
Add the deletion of the app users
|
Python
|
mit
|
joausaga/social-ideation,joausaga/social-ideation,rebearteta/social-ideation,rebearteta/social-ideation,rebearteta/social-ideation,joausaga/social-ideation,joausaga/social-ideation,rebearteta/social-ideation
|
---
+++
@@ -1,5 +1,5 @@
from django.core.management.base import BaseCommand, CommandError
-from app.models import Author, Location, AutoComment, Comment, Idea, Vote
+from app.models import Author, Location, AutoComment, Comment, Idea, Vote, SocialNetworkAppUser
class Command(BaseCommand):
@@ -7,17 +7,19 @@
self.stdout.write('Starting to clean app tables...')
try:
Idea.objects.all().delete()
- self.stdout.write('Ideas deleted')
+ self.stdout.write('Ideas were deleted')
Comment.objects.all().delete()
- self.stdout.write('Comments deleted')
+ self.stdout.write('Comments were deleted')
Vote.objects.all().delete()
- self.stdout.write('Votes deleted')
+ self.stdout.write('Votes were deleted')
Location.objects.all().delete()
- self.stdout.write('Locations deleted')
+ self.stdout.write('Locations were deleted')
Author.objects.all().delete()
- self.stdout.write('Authors deleted')
+ self.stdout.write('Authors were deleted')
AutoComment.objects.all().delete()
- self.stdout.write('Automatic Comments deleted')
+ self.stdout.write('Automatic Comments were deleted')
+ SocialNetworkAppUser.objects.all().delete()
+ self.stdout.write('App users were deleted')
except Exception as e:
raise CommandError('The cleaning procedure couldn\'t finished. Error {}'.format(e))
|
cde145c95f604cd1936fe0d61e6f6b5858177a80
|
app/util/danger.py
|
app/util/danger.py
|
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask import request as flask_request
from flask import abort
import logging
import os
def gen_auth_token(id,expiration=10000):
"""Generate auth token"""
try:
s = Serializer(os.environ['API_KEY'],expires_in=expiration)
except KeyError:
logging.fatal("No API_KEY env")
abort(500)
return s.dumps({'id':id})
def verify_auth_token(token):
"""Verify auth token"""
try:
s = Serializer(os.environ['API_KEY'])
except KeyError:
logging.fatal("No API_KEY env")
abort(500)
# check the token and throw respective exception
try:
user = s.loads(token)
except Exception as e:
logging.info(e)
abort(401)
return user
def enable_auth(func):
"""Decorator to enable auth"""
def wrapper(*args,**kwargs):
re = flask_request
# deny if not authorized
if not re.headers.has_key("Authorization"):
logging.info("No token found")
abort(401)
auth = re.headers.get("Authorization").split(" ")
# proces token
validate = verify_auth_token(auth[1])
logging.debug("Valid auth! Yay")
return func(*args,**kwargs)
return wrapper
|
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask import request as flask_request
from flask import abort
import logging
import os
def gen_auth_token(id, expiration=10000):
    """Generate a signed, time-limited auth token carrying *id*.

    expiration -- token lifetime in seconds.
    Aborts with HTTP 500 when the API_KEY env var is missing.
    """
    try:
        s = Serializer(os.environ['API_KEY'], expires_in=expiration)
    except KeyError:
        logging.fatal("No API_KEY env")
        abort(500)
    return s.dumps({'id': id})
def verify_auth_token(token):
    """Verify *token* and return its decoded payload.

    Aborts with HTTP 500 when API_KEY is unset, 401 when the token is
    invalid or expired.
    """
    try:
        s = Serializer(os.environ['API_KEY'])
    except KeyError:
        logging.fatal("No API_KEY env")
        abort(500)
    # check the token and throw respective exception
    try:
        user = s.loads(token)
    except Exception as e:  # NOTE(review): `e` unused; log stays generic on purpose?
        logging.warning("Bad token")
        abort(401)
    return user
def enable_auth(func):
    """Decorator that rejects requests lacking a valid Authorization token.

    Expects a header of the form "Authorization: <scheme> <token>"; aborts
    with 401 when the header is missing, malformed, or fails verification.
    """
    from functools import wraps

    # wraps() preserves the wrapped view's name/docs; without it Flask sees
    # every decorated view as 'wrapper' and rejects duplicate endpoints.
    @wraps(func)
    def wrapper(*args, **kwargs):
        re = flask_request
        # deny if not authorized; `in` instead of the Py2-only has_key()
        if "Authorization" not in re.headers:
            logging.warning("No token found")
            abort(401)
        auth = re.headers.get("Authorization").split(" ")
        # A header with no token after the scheme used to raise IndexError
        # (HTTP 500); treat it as unauthorized instead.
        if len(auth) < 2:
            logging.warning("Malformed Authorization header")
            abort(401)
        # proces token
        verify_auth_token(auth[1])
        logging.debug("Valid auth! Yay")
        return func(*args, **kwargs)
    return wrapper
|
Add warning log level for auth checks
|
Add warning log level for auth checks
|
Python
|
mit
|
tforrest/soda-automation,tforrest/soda-automation
|
---
+++
@@ -26,7 +26,7 @@
try:
user = s.loads(token)
except Exception as e:
- logging.info(e)
+ logging.warning("Bad token")
abort(401)
return user
@@ -36,7 +36,7 @@
re = flask_request
# deny if not authorized
if not re.headers.has_key("Authorization"):
- logging.info("No token found")
+ logging.warning("No token found")
abort(401)
auth = re.headers.get("Authorization").split(" ")
# proces token
|
c9003940c583a19861c1dff20498aa4c6aae1efb
|
scikits/crab/tests/test_base.py
|
scikits/crab/tests/test_base.py
|
#-*- coding:utf-8 -*-
"""
Base Recommender Models.
"""
# Authors: Marcel Caraciolo <marcel@muricoca.com>
# Bruno Melo <bruno@muricoca.com>
# License: BSD Style.
import unittest
import sys
sys.path.append('/Users/marcelcaraciolo/Desktop/Orygens/crab/crab/scikits/craba')
from base import BaseRecommender
#test classes
class MyRecommender(BaseRecommender):
def __init__(self,model):
BaseRecommender.__init__(self,model)
################################################################################
# The tests
class testBaseRecommender(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
|
#-*- coding:utf-8 -*-
"""
Base Recommender Models.
"""
# Authors: Marcel Caraciolo <marcel@muricoca.com>
# Bruno Melo <bruno@muricoca.com>
# License: BSD Style.
import unittest
from base import BaseRecommender
#test classes
class MyRecommender(BaseRecommender):
def __init__(self,model):
BaseRecommender.__init__(self,model)
################################################################################
# The tests
class testBaseRecommender(unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
|
Fix the test removing paths.
|
Fix the test removing paths.
|
Python
|
bsd-3-clause
|
Lawrence-Liu/crab,muricoca/crab,hi2srihari/crab,echogreens/crab,muricoca/crab,augustoppimenta/crab,imouren/crab,rcarmo/crab,Flowerowl/Crab,wnyc/crab,wnyc/crab
|
---
+++
@@ -9,9 +9,6 @@
# License: BSD Style.
import unittest
-import sys
-
-sys.path.append('/Users/marcelcaraciolo/Desktop/Orygens/crab/crab/scikits/craba')
from base import BaseRecommender
|
b2e4b0eaf67714c180feec53e5b0021d911d8f8d
|
pandora/py2compat.py
|
pandora/py2compat.py
|
"""
Python 2 Compatibility Layer
This module exists to work around compatibility issues between Python 2 and
Python 3. The main code-base will use Python 3 idioms and this module will
patch Python 2 code to support those changes. When Python 2 support is
dropped this module can be removed and imports can be updated.
"""
# Prefer the Python 3 configparser; on Python 2 fall back to
# SafeConfigParser and shim in the Python 3 read_file() method name.
try:
    from configparser import ConfigParser
except ImportError:
    from ConfigParser import SafeConfigParser

    class ConfigParser(SafeConfigParser):
        def read_file(self, fp):
            return self.readfp(fp)

# Only used in tests
# NOTE(review): the Python 3 branch does not import ``patch`` while the
# Python 2 branch does -- code relying on ``patch`` will fail on
# Python 3; confirm and align the two import lists.
try:
    from unittest.mock import Mock, MagicMock, call
except ImportError:
    try:
        from mock import Mock, MagicMock, call, patch
    except ImportError:
        pass
|
"""
Python 2 Compatibility Layer
This module exists to work around compatibility issues between Python 2 and
Python 3. The main code-base will use Python 3 idioms and this module will
patch Python 2 code to support those changes. When Python 2 support is
dropped this module can be removed and imports can be updated.
"""
# Prefer the Python 3 configparser; on Python 2 fall back to
# SafeConfigParser and shim in the Python 3 read_file() method name.
try:
    from configparser import ConfigParser
except ImportError:
    from ConfigParser import SafeConfigParser

    class ConfigParser(SafeConfigParser):
        def read_file(self, fp):
            return self.readfp(fp)

# Only used in tests; the final ImportError is swallowed so production
# installs without the ``mock`` backport still import this module cleanly.
try:
    from unittest.mock import Mock, MagicMock, call, patch
except ImportError:
    try:
        from mock import Mock, MagicMock, call, patch
    except ImportError:
        pass
|
Add missing import for unittest.mock.patch.
|
Add missing import for unittest.mock.patch.
|
Python
|
mit
|
mcrute/pydora
|
---
+++
@@ -20,7 +20,7 @@
# Only used in tests
try:
- from unittest.mock import Mock, MagicMock, call
+ from unittest.mock import Mock, MagicMock, call, patch
except ImportError:
try:
from mock import Mock, MagicMock, call, patch
|
84dd763d5d2aec1c4248e42106ef4f68439bc4cd
|
server/api/serializers/rides.py
|
server/api/serializers/rides.py
|
import requests
from django.conf import settings
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from server.api.serializers.chapters import ChapterSerializer
from server.core.models.rides import Ride, RideRiders
class RideSerializer(serializers.ModelSerializer):
    """Read serializer for a Ride: embeds the chapter and exposes the
    registered riders as primary keys."""
    chapter = ChapterSerializer()
    riders = serializers.PrimaryKeyRelatedField(source='registered_riders', many=True, read_only=True)

    class Meta:
        model = Ride
        fields = ('id', 'name', 'slug', 'description_html', 'start_location', 'end_location', 'start_date', 'end_date',
                  'chapter', 'rider_capacity', 'riders', 'spaces_left', 'price', 'full_cost', 'currency', 'is_over',
                  'fundraising_total', 'fundraising_target')


class RideRiderSerializer(serializers.ModelSerializer):
    """Serializer for a rider's registration on a ride; ``user`` defaults to
    the requesting user and each (user, ride) pair must be unique."""
    user = serializers.PrimaryKeyRelatedField(read_only=True, default=serializers.CurrentUserDefault())

    class Meta:
        model = RideRiders
        fields = ('ride', 'user', 'signup_date', 'signup_expires', 'status', 'paid', 'expired', 'payload')
        validators = [
            UniqueTogetherValidator(
                queryset=RideRiders.objects.all(),
                fields=('user', 'ride'),
                message='You have already registered for this ride.'
            )
        ]
|
import requests
from django.conf import settings
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from server.api.serializers.chapters import ChapterSerializer
from .riders import RiderSerializer
from server.core.models.rides import Ride, RideRiders
class RideSerializer(serializers.ModelSerializer):
    """Read serializer for a Ride: embeds the chapter and serializes each
    registered rider in full (not just primary keys)."""
    chapter = ChapterSerializer()
    riders = RiderSerializer(source='registered_riders', many=True, read_only=True)

    class Meta:
        model = Ride
        fields = ('id', 'name', 'slug', 'description_html', 'start_location', 'end_location', 'start_date', 'end_date',
                  'chapter', 'rider_capacity', 'riders', 'spaces_left', 'price', 'full_cost', 'currency', 'is_over',
                  'fundraising_total', 'fundraising_target')


class RideRiderSerializer(serializers.ModelSerializer):
    """Serializer for a rider's registration on a ride; ``user`` defaults to
    the requesting user and each (user, ride) pair must be unique."""
    user = serializers.PrimaryKeyRelatedField(read_only=True, default=serializers.CurrentUserDefault())

    class Meta:
        model = RideRiders
        fields = ('ride', 'user', 'signup_date', 'signup_expires', 'status', 'paid', 'expired', 'payload')
        validators = [
            UniqueTogetherValidator(
                queryset=RideRiders.objects.all(),
                fields=('user', 'ride'),
                message='You have already registered for this ride.'
            )
        ]
|
Send through serialised users with each ride api request
|
Send through serialised users with each ride api request
|
Python
|
mit
|
mwillmott/techbikers,mwillmott/techbikers,mwillmott/techbikers,Techbikers/techbikers,Techbikers/techbikers,Techbikers/techbikers,Techbikers/techbikers,mwillmott/techbikers
|
---
+++
@@ -3,12 +3,13 @@
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from server.api.serializers.chapters import ChapterSerializer
+from .riders import RiderSerializer
from server.core.models.rides import Ride, RideRiders
class RideSerializer(serializers.ModelSerializer):
chapter = ChapterSerializer()
- riders = serializers.PrimaryKeyRelatedField(source='registered_riders', many=True, read_only=True)
+ riders = RiderSerializer(source='registered_riders', many=True, read_only=True)
class Meta:
model = Ride
|
823347e9c5bcc93710b7bdc2656f438a0e74c2b8
|
invocations/_version.py
|
invocations/_version.py
|
# Single source of truth for the package version; the dotted string is
# derived from the tuple so the two can never disagree.
__version_info__ = (0, 14, 0)
__version__ = '.'.join(str(part) for part in __version_info__)
|
# Single source of truth for the package version; the dotted string is
# derived from the tuple so the two can never disagree.
__version_info__ = (0, 14, 1)
__version__ = '.'.join(str(part) for part in __version_info__)
|
Cut 0.14.1 because setup.py bug
|
Cut 0.14.1 because setup.py bug
|
Python
|
bsd-2-clause
|
pyinvoke/invocations
|
---
+++
@@ -1,2 +1,2 @@
-__version_info__ = (0, 14, 0)
+__version_info__ = (0, 14, 1)
__version__ = '.'.join(map(str, __version_info__))
|
12bbc7e10ae52328feb766e2bed5f5f20fa0d354
|
pyramid_es/__init__.py
|
pyramid_es/__init__.py
|
from pyramid.settings import asbool
from .client import ElasticClient
def client_from_config(settings, prefix='elastic.'):
    """
    Instantiate and configure an Elasticsearch from settings.

    In typical Pyramid usage, you shouldn't use this directly: instead, just
    include ``pyramid_es`` and use the :py:func:`get_client` function to get
    access to the shared :py:class:`.client.ElasticClient` instance.
    """
    return ElasticClient(
        servers=settings.get(prefix + 'servers', ['localhost:9200']),
        timeout=settings.get(prefix + 'timeout', 1.0),
        # No default here: a missing index setting is a configuration error.
        index=settings[prefix + 'index'],
        use_transaction=asbool(settings.get(prefix + 'use_transaction', True)),
        disable_indexing=settings.get(prefix + 'disable_indexing', False))


def includeme(config):
    """Pyramid include hook: build the shared client, ensure the index
    exists, and stash the client on the registry."""
    registry = config.registry
    settings = registry.settings

    client = client_from_config(settings)
    client.ensure_index()
    registry.pyramid_es_client = client


def get_client(request):
    """
    Get the registered Elasticsearch client. The supplied argument can be
    either a ``Request`` instance or a ``Registry``.
    """
    # Fall back to treating the argument itself as the registry when it
    # has no usable .registry attribute.
    registry = getattr(request, 'registry', None)
    if registry is None:
        registry = request
    return registry.pyramid_es_client
|
from pyramid.settings import asbool
from .client import ElasticClient
def client_from_config(settings, prefix='elastic.'):
    """
    Instantiate and configure an Elasticsearch from settings.

    In typical Pyramid usage, you shouldn't use this directly: instead, just
    include ``pyramid_es`` and use the :py:func:`get_client` function to get
    access to the shared :py:class:`.client.ElasticClient` instance.
    """
    return ElasticClient(
        servers=settings.get(prefix + 'servers', ['localhost:9200']),
        timeout=settings.get(prefix + 'timeout', 1.0),
        # No default here: a missing index setting is a configuration error.
        index=settings[prefix + 'index'],
        use_transaction=asbool(settings.get(prefix + 'use_transaction', True)),
        disable_indexing=settings.get(prefix + 'disable_indexing', False))


def includeme(config):
    """Pyramid include hook: build the shared client and, only when the
    ``elastic.ensure_index_on_start`` setting is truthy, ensure the index
    exists before stashing the client on the registry."""
    registry = config.registry
    settings = registry.settings

    client = client_from_config(settings)
    if asbool(settings.get('elastic.ensure_index_on_start')):
        client.ensure_index()
    registry.pyramid_es_client = client


def get_client(request):
    """
    Get the registered Elasticsearch client. The supplied argument can be
    either a ``Request`` instance or a ``Registry``.
    """
    # Fall back to treating the argument itself as the registry when it
    # has no usable .registry attribute.
    registry = getattr(request, 'registry', None)
    if registry is None:
        registry = request
    return registry.pyramid_es_client
|
Add a settings key to ensure index at start
|
Add a settings key to ensure index at start
|
Python
|
mit
|
storborg/pyramid_es
|
---
+++
@@ -24,7 +24,8 @@
settings = registry.settings
client = client_from_config(settings)
- client.ensure_index()
+ if asbool(settings.get('elastic.ensure_index_on_start')):
+ client.ensure_index()
registry.pyramid_es_client = client
|
bfecf498c30c08d8ede18fd587e192f0961c334c
|
invoke/run.py
|
invoke/run.py
|
from subprocess import PIPE
from .monkey import Popen
from .exceptions import Failure
class Result(object):
    """Outcome of a finished subprocess: captured streams and exit code."""

    def __init__(self, stdout=None, stderr=None, exited=None):
        # return_code is kept as an alias of exited.
        self.exited = self.return_code = exited
        self.stdout = stdout
        self.stderr = stderr

    def __nonzero__(self):
        # Holy mismatch between name and implementation, Batman!
        # Truthy exactly when the command exited with status 0.
        return self.exited == 0


def run(command, warn=False):
    """
    Execute ``command`` in a local subprocess.

    By default, raises an exception if the subprocess terminates with a nonzero
    return code. This may be disabled by setting ``warn=True``.
    """
    process = Popen(command,
        shell=True,
        stdout=PIPE,
        stderr=PIPE
    )
    stdout, stderr = process.communicate()
    result = Result(stdout=stdout, stderr=stderr, exited=process.returncode)
    if not (result or warn):
        raise Failure(result)
    return result
|
from subprocess import PIPE
from .monkey import Popen
from .exceptions import Failure
class Result(object):
    """Outcome of a finished subprocess: captured streams and exit code."""

    def __init__(self, stdout=None, stderr=None, exited=None):
        # return_code is kept as an alias of exited.
        self.exited = self.return_code = exited
        self.stdout = stdout
        self.stderr = stderr

    def __nonzero__(self):
        # Holy mismatch between name and implementation, Batman!
        # Truthy exactly when the command exited with status 0.
        return self.exited == 0

    def __str__(self):
        # Human-readable summary: exit status plus each captured stream,
        # or a "(no stdout)"/"(no stderr)" placeholder when empty.
        ret = ["Command exited with status %s." % self.exited]
        for x in ('stdout', 'stderr'):
            val = getattr(self, x)
            ret.append("""=== %s ===
%s
""" % (x, val.rstrip()) if val else "(no %s)" % x)
        return "\n".join(ret)


def run(command, warn=False):
    """
    Execute ``command`` in a local subprocess.

    By default, raises an exception if the subprocess terminates with a nonzero
    return code. This may be disabled by setting ``warn=True``.
    """
    process = Popen(command,
        shell=True,
        stdout=PIPE,
        stderr=PIPE
    )
    stdout, stderr = process.communicate()
    result = Result(stdout=stdout, stderr=stderr, exited=process.returncode)
    if not (result or warn):
        raise Failure(result)
    return result
|
Add semi-useful `__str__` for Result
|
Add semi-useful `__str__` for Result
|
Python
|
bsd-2-clause
|
pyinvoke/invoke,mkusz/invoke,singingwolfboy/invoke,tyewang/invoke,mattrobenolt/invoke,kejbaly2/invoke,sophacles/invoke,pfmoore/invoke,pyinvoke/invoke,mkusz/invoke,pfmoore/invoke,kejbaly2/invoke,alex/invoke,frol/invoke,mattrobenolt/invoke,frol/invoke
|
---
+++
@@ -14,6 +14,14 @@
# Holy mismatch between name and implementation, Batman!
return self.exited == 0
+ def __str__(self):
+ ret = ["Command exited with status %s." % self.exited]
+ for x in ('stdout', 'stderr'):
+ val = getattr(self, x)
+ ret.append("""=== %s ===
+%s
+""" % (x, val.rstrip()) if val else "(no %s)" % x)
+ return "\n".join(ret)
def run(command, warn=False):
"""
|
87d099f8094d5fb2c78729adfc6df9c68f68b450
|
pythonforandroid/recipes/regex/__init__.py
|
pythonforandroid/recipes/regex/__init__.py
|
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class RegexRecipe(CompiledComponentsPythonRecipe):
    """python-for-android recipe for the ``regex`` PyPI package."""

    name = 'regex'
    version = '2017.07.28'
    url = 'https://pypi.python.org/packages/d1/23/5fa829706ee1d4452552eb32e0bfc1039553e01f50a8754c6f7152e85c1b/regex-{version}.tar.gz'
    depends = ['setuptools']


recipe = RegexRecipe()
|
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
class RegexRecipe(CompiledComponentsPythonRecipe):
    """python-for-android recipe for the ``regex`` PyPI package."""

    name = 'regex'
    version = '2017.07.28'
    url = 'https://pypi.python.org/packages/d1/23/5fa829706ee1d4452552eb32e0bfc1039553e01f50a8754c6f7152e85c1b/regex-{version}.tar.gz'
    depends = ['setuptools']
    # Build via the target python rather than hostpython; hostpython's
    # Python.h pulls in crypt.h, which is missing on the build host.
    call_hostpython_via_targetpython = False


recipe = RegexRecipe()
|
Fix compilation for regex recipe
|
[recipes] Fix compilation for regex recipe
The error was: build/other_builds/hostpython3/desktop/hostpython3/Include/Python.h:39:19: fatal error: crypt.h: No such file or directory
|
Python
|
mit
|
rnixx/python-for-android,kronenpj/python-for-android,kivy/python-for-android,kivy/python-for-android,kivy/python-for-android,kivy/python-for-android,germn/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,germn/python-for-android,germn/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,germn/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,rnixx/python-for-android,kivy/python-for-android,germn/python-for-android,rnixx/python-for-android,germn/python-for-android
|
---
+++
@@ -7,6 +7,7 @@
url = 'https://pypi.python.org/packages/d1/23/5fa829706ee1d4452552eb32e0bfc1039553e01f50a8754c6f7152e85c1b/regex-{version}.tar.gz'
depends = ['setuptools']
+ call_hostpython_via_targetpython = False
recipe = RegexRecipe()
|
5f5be04adc9e17aa497022ed3b19371075c63d85
|
relay_api/api/backend.py
|
relay_api/api/backend.py
|
import json
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
    """Instantiate a relay object for each configured relay and cache its
    current state in the shared ``relays`` config dict."""
    for r in relays:
        relays[r]["object"] = relay(relays[r]["gpio"])
        relays[r]["state"] = relays[r]["object"].get_state()


def get_all_relays():
    """Return all relays serialized as a JSON string."""
    relays_dict = __get_relay_dict()
    return json.dumps(relays_dict, indent=4)


def get_relay(relay_name):
    """Return one relay as a JSON string, or None if the name is unknown."""
    if relay_name not in relays:
        return None
    relay_dict = __get_relay_dict(relay_name)
    return json.dumps(relay_dict, indent=4)


def __get_relay_dict(relay_name=None):
    # NOTE(review): ``relays["relay_name"]`` looks up the literal string
    # "relay_name" instead of the ``relay_name`` argument -- confirm,
    # this looks like a bug.
    # NOTE(review): dict.copy() is shallow, so the del of "object" below
    # mutates the nested entries shared with the ``relays`` config --
    # confirm a deep copy is intended.
    if relay_name:
        relay_dict = dict.copy(relays["relay_name"])
        del(relay_dict["object"])
        return relay_dict
    relays_dict = dict.copy(relays)
    for r in relays_dict:
        del(relays_dict[r]["object"])
    return relays_dict
|
import json
import copy
from relay_api.core.relay import relay
from relay_api.conf.config import relays
def init_relays():
    """Instantiate a relay object for each configured relay and cache its
    current state in the shared ``relays`` config dict."""
    for r in relays:
        relays[r]["object"] = relay(relays[r]["gpio"])
        relays[r]["state"] = relays[r]["object"].get_state()


def get_all_relays():
    """Return all relays serialized as a JSON string."""
    relays_dict = __get_relay_dict()
    return json.dumps(relays_dict, indent=4)


def get_relay(relay_name):
    """Return one relay as a JSON string, or None if the name is unknown."""
    if relay_name not in relays:
        return None
    relay_dict = __get_relay_dict(relay_name)
    return json.dumps(relay_dict, indent=4)


def __get_relay_dict(relay_name=None):
    # Deep-copy so deleting the non-serializable "object" entry does not
    # mutate the shared ``relays`` config.
    if relay_name:
        relay_dict = copy.deepcopy(relays[relay_name])
        del(relay_dict["object"])
        return relay_dict
    relays_dict = copy.deepcopy(relays)
    for r in relays_dict:
        del(relays_dict[r]["object"])
    return relays_dict
|
Use deepcopy to copy dicts
|
Use deepcopy to copy dicts
|
Python
|
mit
|
pahumadad/raspi-relay-api
|
---
+++
@@ -1,4 +1,5 @@
import json
+import copy
from relay_api.core.relay import relay
from relay_api.conf.config import relays
@@ -23,10 +24,10 @@
def __get_relay_dict(relay_name=None):
if relay_name:
- relay_dict = dict.copy(relays["relay_name"])
+ relay_dict = copy.deepcopy(relays[relay_name])
del(relay_dict["object"])
return relay_dict
- relays_dict = dict.copy(relays)
+ relays_dict = copy.deepcopy(relays)
for r in relays_dict:
del(relays_dict[r]["object"])
return relays_dict
|
c100d61f084ea759654baf0e9414f73567577f68
|
src/robot.py
|
src/robot.py
|
from sr import *
print "Hello, world!"
|
import time
from sr import *
print "Hello, world!"
R = Robot()

# This is the configuration for Elizabeth.
# TODO: make this generic
BOARD_RIGHT = R.motors["SR0HL17"]
BOARD_LEFT = R.motors["SR0YK1C"]

WHEEL_FRONT_LEFT = BOARD_LEFT.m1   # positive is towards the front of the robot
WHEEL_FRONT_RIGHT = BOARD_RIGHT.m0 # positive is towards the front of the robot
WHEEL_BACK = BOARD_RIGHT.m1        # positive is to the right of the robot

# enable the brakes
WHEEL_FRONT_LEFT.use_brake = True
WHEEL_FRONT_RIGHT.use_brake = True
WHEEL_BACK.use_brake = True

# Per-wheel sign corrections applied in set_motors().
WHEEL_FRONT_LEFT_CALIBRATION = -1
WHEEL_FRONT_RIGHT_CALIBRATION = -1
WHEEL_BACK_CALIBRATION = 1

def set_motors(front_left, front_right, back):
    # Apply calibration signs and write integer power to each wheel.
    WHEEL_FRONT_LEFT.power = int(front_left * WHEEL_FRONT_LEFT_CALIBRATION)
    WHEEL_FRONT_RIGHT.power = int(front_right * WHEEL_FRONT_RIGHT_CALIBRATION)
    WHEEL_BACK.power = int(back * WHEEL_BACK_CALIBRATION)

def forward(speed):
    set_motors(speed, speed, 0)

def reverse(speed):
    forward(-speed)

def stop():
    forward(0)

def rotate(speed):
    # Spin in place: opposite front wheels plus the back wheel.
    set_motors(speed, -speed, speed)

def can_see_block():
    # True when any visible marker is a token marker.
    ACCEPTABLE_MARKER_TYPES = (MARKER_TOKEN_TOP,
                               MARKER_TOKEN_BOTTOM,
                               MARKER_TOKEN_SIDE)
    markers = R.see()
    return any(marker.info.marker_type in ACCEPTABLE_MARKER_TYPES
               for marker in R.see())

# FIXME: debug while we don't have a marker
# NOTE: this second definition shadows the vision-based one above and
# simply reports "seen" every fifth call.
search_count = 0
def can_see_block():
    global search_count
    search_count += 1
    return search_count % 5 == 0

# Each state_* function performs one step and returns the next state.
def state_search():
    rotate(40)
    time.sleep(0.4)
    stop()
    time.sleep(0.3)
    return state_advance if can_see_block() else state_search

def state_advance():
    forward(30)
    time.sleep(1)
    stop()
    time.sleep(0.8)
    return state_advance if can_see_block() else state_backoff

def state_backoff():
    reverse(20)
    time.sleep(2)
    stop()
    rotate(-80)
    time.sleep(4)
    stop()
    return state_search

# Simple state machine: run forever, following returned transitions.
current_state = state_search
while True:
    current_state = current_state()
|
Implement a basic search-and-grab algorithm
|
Implement a basic search-and-grab algorithm
|
Python
|
mit
|
prophile/tudor-block-chase
|
---
+++
@@ -1,4 +1,84 @@
+import time
from sr import *
print "Hello, world!"
+R = Robot()
+
+# This is the configuration for Elizabeth.
+# TODO: make this generic
+BOARD_RIGHT = R.motors["SR0HL17"]
+BOARD_LEFT = R.motors["SR0YK1C"]
+
+WHEEL_FRONT_LEFT = BOARD_LEFT.m1 # positive is towards the front of the robot
+WHEEL_FRONT_RIGHT = BOARD_RIGHT.m0 # positive is towards the front of the robot
+WHEEL_BACK = BOARD_RIGHT.m1 # positive is to the right of the robot
+
+# enable the brakes
+WHEEL_FRONT_LEFT.use_brake = True
+WHEEL_FRONT_RIGHT.use_brake = True
+WHEEL_BACK.use_brake = True
+
+WHEEL_FRONT_LEFT_CALIBRATION = -1
+WHEEL_FRONT_RIGHT_CALIBRATION = -1
+WHEEL_BACK_CALIBRATION = 1
+
+def set_motors(front_left, front_right, back):
+ WHEEL_FRONT_LEFT.power = int(front_left * WHEEL_FRONT_LEFT_CALIBRATION)
+ WHEEL_FRONT_RIGHT.power = int(front_right * WHEEL_FRONT_RIGHT_CALIBRATION)
+ WHEEL_BACK.power = int(back * WHEEL_BACK_CALIBRATION)
+
+def forward(speed):
+ set_motors(speed, speed, 0)
+
+def reverse(speed):
+ forward(-speed)
+
+def stop():
+ forward(0)
+
+def rotate(speed):
+ set_motors(speed, -speed, speed)
+
+def can_see_block():
+ ACCEPTABLE_MARKER_TYPES = (MARKER_TOKEN_TOP,
+ MARKER_TOKEN_BOTTOM,
+ MARKER_TOKEN_SIDE)
+ markers = R.see()
+ return any(marker.info.marker_type in ACCEPTABLE_MARKER_TYPES
+ for marker in R.see())
+
+# FIXME: debug while we don't have a marker
+search_count = 0
+def can_see_block():
+ global search_count
+ search_count += 1
+ return search_count % 5 == 0
+
+def state_search():
+ rotate(40)
+ time.sleep(0.4)
+ stop()
+ time.sleep(0.3)
+ return state_advance if can_see_block() else state_search
+
+def state_advance():
+ forward(30)
+ time.sleep(1)
+ stop()
+ time.sleep(0.8)
+ return state_advance if can_see_block() else state_backoff
+
+def state_backoff():
+ reverse(20)
+ time.sleep(2)
+ stop()
+ rotate(-80)
+ time.sleep(4)
+ stop()
+ return state_search
+
+current_state = state_search
+while True:
+ current_state = current_state()
+
|
0d4e619a11a084f83ab42d45e528f7b38777fcae
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Andrew Grim
# Copyright (c) 2014 Andrew Grim
#
# License: MIT
#
"""This module exports the Puppet plugin class."""
from SublimeLinter.lint import Linter, util
class Puppet(Linter):
    """Provides an interface to puppet."""

    syntax = 'puppet'
    cmd = ('puppet', 'parser', 'validate', '--color=false')
    # Captures the message, the offending token and the line number from
    # "Error: ... Syntax error at '<near>'; expected '...' ... line N".
    regex = r'^(?P<error>Error:).+?(?P<message>Syntax error at \'(?P<near>.+?)\'; expected \'.+\').+?line (?P<line>\d+)'
    error_stream = util.STREAM_STDERR
    # NOTE(review): empty defaults dict appears redundant -- confirm.
    defaults = {}
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Andrew Grim
# Copyright (c) 2014 Andrew Grim
#
# License: MIT
#
"""This module exports the Puppet plugin class."""
from SublimeLinter.lint import Linter, util
class Puppet(Linter):
    """Provides an interface to puppet."""

    syntax = 'puppet'
    cmd = ('puppet', 'parser', 'validate', '--color=false')
    # Captures the message, the offending token and the line number from
    # "Error: ... Syntax error at '<near>'; expected '...' ... line N".
    regex = r'^Error:.+?(?P<message>Syntax error at \'(?P<near>.+?)\'; expected \'.+\').+?line (?P<line>\d+)'
    error_stream = util.STREAM_STDERR
|
Remove unnecessary defaults and simplify regexp
|
Remove unnecessary defaults and simplify regexp
|
Python
|
mit
|
dylanratcliffe/SublimeLinter-puppet,travisgroth/SublimeLinter-puppet,stopdropandrew/SublimeLinter-puppet
|
---
+++
@@ -19,6 +19,5 @@
syntax = 'puppet'
cmd = ('puppet', 'parser', 'validate', '--color=false')
- regex = r'^(?P<error>Error:).+?(?P<message>Syntax error at \'(?P<near>.+?)\'; expected \'.+\').+?line (?P<line>\d+)'
+ regex = r'^Error:.+?(?P<message>Syntax error at \'(?P<near>.+?)\'; expected \'.+\').+?line (?P<line>\d+)'
error_stream = util.STREAM_STDERR
- defaults = {}
|
b6b506e8250078664bdefdcf7d9d380e950e3730
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
    """Provides an interface to stylint."""

    npm_name = 'stylint'
    syntax = ('stylus', 'vue')
    selectors = {'vue': 'source.stylus.embedded.html'}
    cmd = 'stylint @ *'
    executable = 'stylint'
    version_requirement = '>= 1.5.0'
    # Verbose (?x) regex: matches the file-name line, then each
    # "line:col severity message" report line.
    regex = r'''(?xi)
        # Comments show example output for each line of a Stylint warning
        # /path/to/file/example.styl
        ^.*$\s*
        # 177:24 colors warning hexidecimal color should be a variable
        ^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
    '''
    multiline = True
    error_stream = util.STREAM_STDOUT
    tempfile_suffix = 'styl'
    config_file = ('--config', '.stylintrc', '~')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
    """Provides an interface to stylint."""

    npm_name = 'stylint'
    syntax = ('stylus', 'vue')
    selectors = {'vue': 'source.stylus.embedded.html'}
    cmd = 'stylint @ *'
    executable = 'stylint'
    version_requirement = '>= 1.5.0'
    # Verbose (?x) regex: matches the file-name line, then each report
    # line; the optional rule name may appear before the severity.
    regex = r'''(?xi)
        # Comments show example output for each line of a Stylint warning
        # /path/to/file/example.styl
        ^.*$\s*
        # 177:24 colors warning hexidecimal color should be a variable
        ^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
    '''
    multiline = True
    error_stream = util.STREAM_STDOUT
    tempfile_suffix = 'styl'
    config_file = ('--config', '.stylintrc', '~')
|
Handle case where rule shows before severity
|
Handle case where rule shows before severity
Thank you @suprMax !
|
Python
|
mit
|
jackbrewer/SublimeLinter-contrib-stylint
|
---
+++
@@ -27,7 +27,7 @@
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
- ^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
+ ^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
|
f7172424977d0166ec4dd7946a360a5a426f4a72
|
bin/migrate-tips.py
|
bin/migrate-tips.py
|
from gratipay.wireup import db, env
from gratipay.models.team import AlreadyMigrated
db = db(env())

# Approved teams whose owners received tips and whose tips have not yet
# been migrated to payment_instructions.
teams = db.all("""

    SELECT distinct ON (t.slug) t.*::teams
      FROM teams t
      JOIN tips ON t.owner = tips.tippee -- Only fetch teams whose owner have tips.
     WHERE t.is_approved IS TRUE -- Only fetch approved teams.
       AND NOT EXISTS ( -- Make sure not already migrated.
            SELECT 1
              FROM payment_instructions pi
             WHERE t.slug = pi.team
               AND pi.ctime < t.ctime
           )

""")

# Migrate each team's tips, skipping teams already handled.
for team in teams:
    try:
        ntips = team.migrate_tips()
        print("Migrated {} tip(s) for '{}'".format(ntips, team.slug))
    except AlreadyMigrated:
        print("'%s' already migrated." % team.slug)

print("Done.")
|
from gratipay.wireup import db, env
from gratipay.models.team import AlreadyMigrated
db = db(env())

# Approved teams whose owners received Gratipay 1.0 tips, excluding any
# owner who already has migrated tips on another of their teams.
teams = db.all("""

    SELECT distinct ON (t.slug) t.*::teams
      FROM teams t
      JOIN tips ON t.owner = tips.tippee -- Only fetch teams whose owners had tips under Gratipay 1.0
     WHERE t.is_approved IS TRUE -- Only fetch approved teams
       AND NOT EXISTS ( -- Make sure tips haven't been migrated for any teams with same owner
            SELECT 1
              FROM payment_instructions pi
              JOIN teams t2 ON t2.slug = pi.team
             WHERE t2.owner = t.owner
               AND pi.ctime < t2.ctime
           )

""")

# Migrate each team's tips, skipping teams already handled.
for team in teams:
    try:
        ntips = team.migrate_tips()
        print("Migrated {} tip(s) for '{}'".format(ntips, team.slug))
    except AlreadyMigrated:
        print("'%s' already migrated." % team.slug)

print("Done.")
|
Exclude teams if owner has other teams with migrated tips
|
Exclude teams if owner has other teams with migrated tips
|
Python
|
mit
|
gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com
|
---
+++
@@ -6,14 +6,15 @@
teams = db.all("""
SELECT distinct ON (t.slug) t.*::teams
FROM teams t
- JOIN tips ON t.owner = tips.tippee -- Only fetch teams whose owner have tips.
- WHERE t.is_approved IS TRUE -- Only fetch approved teams.
- AND NOT EXISTS ( -- Make sure not already migrated.
+ JOIN tips ON t.owner = tips.tippee -- Only fetch teams whose owners had tips under Gratipay 1.0
+ WHERE t.is_approved IS TRUE -- Only fetch approved teams
+ AND NOT EXISTS ( -- Make sure tips haven't been migrated for any teams with same owner
SELECT 1
FROM payment_instructions pi
- WHERE t.slug = pi.team
- AND pi.ctime < t.ctime
- )
+ JOIN teams t2 ON t2.slug = pi.team
+ WHERE t2.owner = t.owner
+ AND pi.ctime < t2.ctime
+ )
""")
for team in teams:
|
0ce14be170e09530b225f2f7526ad68ee1758095
|
peering/migrations/0027_auto_20190105_1600.py
|
peering/migrations/0027_auto_20190105_1600.py
|
# Generated by Django 2.1.4 on 2019-01-05 15:00
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter potential_internet_exchange_peering_sessions to a blank-able
    ArrayField of IP addresses defaulting to an empty list."""

    dependencies = [
        (
            "peering",
            "0026_autonomoussystem_potential_internet_exchange_peering_sessions",
        )
    ]

    operations = [
        migrations.AlterField(
            model_name="autonomoussystem",
            name="potential_internet_exchange_peering_sessions",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.GenericIPAddressField(),
                blank=True,
                default=list,
                size=None,
            ),
        )
    ]
|
# Generated by Django 2.1.4 on 2019-01-05 15:00
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter potential_internet_exchange_peering_sessions to a blank-able
    ArrayField of IP addresses, converting existing NULL values to empty
    lists (and back on reverse)."""

    dependencies = [
        (
            "peering",
            "0026_autonomoussystem_potential_internet_exchange_peering_sessions",
        )
    ]

    def forwards_func(apps, schema_editor):
        # Use the historical model and the migration's DB alias;
        # rewrite NULL values as empty lists.
        AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
        db_alias = schema_editor.connection.alias
        AutonomousSystem.objects.using(db_alias).filter(
            potential_internet_exchange_peering_sessions=None
        ).update(potential_internet_exchange_peering_sessions=[])

    def reverse_func(apps, schema_editor):
        # Inverse of forwards_func: empty lists back to NULL.
        AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
        db_alias = schema_editor.connection.alias
        AutonomousSystem.objects.using(db_alias).filter(
            potential_internet_exchange_peering_sessions=[]
        ).update(potential_internet_exchange_peering_sessions=None)

    operations = [
        migrations.AlterField(
            model_name="autonomoussystem",
            name="potential_internet_exchange_peering_sessions",
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.GenericIPAddressField(),
                blank=True,
                default=list,
                size=None,
            ),
        ),
        migrations.RunPython(forwards_func, reverse_func),
    ]
|
Fix issue with migrations introduced lately.
|
Fix issue with migrations introduced lately.
|
Python
|
apache-2.0
|
respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager
|
---
+++
@@ -13,6 +13,20 @@
)
]
+ def forwards_func(apps, schema_editor):
+ AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
+ db_alias = schema_editor.connection.alias
+ AutonomousSystem.objects.using(db_alias).filter(
+ potential_internet_exchange_peering_sessions=None
+ ).update(potential_internet_exchange_peering_sessions=[])
+
+ def reverse_func(apps, schema_editor):
+ AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
+ db_alias = schema_editor.connection.alias
+ AutonomousSystem.objects.using(db_alias).filter(
+ potential_internet_exchange_peering_sessions=[]
+ ).update(potential_internet_exchange_peering_sessions=None)
+
operations = [
migrations.AlterField(
model_name="autonomoussystem",
@@ -23,5 +37,6 @@
default=list,
size=None,
),
- )
+ ),
+ migrations.RunPython(forwards_func, reverse_func),
]
|
e8b8c257c71b6c02fa691557618261e6832fba94
|
faker/providers/ssn/uk_UA/__init__.py
|
faker/providers/ssn/uk_UA/__init__.py
|
# coding=utf-8
from __future__ import unicode_literals
from .. import Provider as SsnProvider
# Note: as there no SSN in Ukraine
# we get value added tax identification number (VATIN) here.
# It is also called "Ідентифікаційний номер платника податків" (in ukrainian).
# It contains only digits and length if 12.
class Provider(SsnProvider):
    # VATIN stand-in: a 12-digit numeric format.
    ssn_formats = ("############",)
|
# coding=utf-8
from __future__ import unicode_literals
from datetime import date
from .. import Provider as SsnProvider
from faker.providers.date_time import Provider as DateTimeProvider
class Provider(SsnProvider):
    @classmethod
    def ssn(cls):
        """
        Generate a Ukrainian "Реєстраційний номер облікової картки платника
        податків" (taxpayer registration number), also known as
        "Ідентифікаційний номер фізичної особи": five birth-date digits,
        four sequence digits, and one check digit.
        """
        digits = []

        # Number of days between 1899-12-31 and a birth date
        for digit in str((DateTimeProvider.date_object() -
                          date(1899, 12, 31)).days):
            digits.append(int(digit))

        # Person's sequence number
        for _ in range(4):
            digits.append(cls.random_int(0, 9))

        # Weighted checksum over the first nine digits.
        checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 +
                    digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 +
                    digits[8]*7)
        # Remainder of a checksum divided by 11 or 1 if it equals to 10
        digits.append(checksum % 11 % 10)

        return ''.join(str(digit) for digit in digits)
|
Make the Ukrainian SSN provider realer
|
Make the Ukrainian SSN provider realer
|
Python
|
mit
|
joke2k/faker,danhuss/faker,trtd/faker,joke2k/faker
|
---
+++
@@ -1,13 +1,34 @@
# coding=utf-8
from __future__ import unicode_literals
+
+from datetime import date
+
from .. import Provider as SsnProvider
-
-
-# Note: as there no SSN in Ukraine
-# we get value added tax identification number (VATIN) here.
-# It is also called "Ідентифікаційний номер платника податків" (in ukrainian).
-# It contains only digits and length if 12.
+from faker.providers.date_time import Provider as DateTimeProvider
class Provider(SsnProvider):
- ssn_formats = ("############",)
+ @classmethod
+ def ssn(cls):
+ """
+ Ukrainian "Реєстраційний номер облікової картки платника податків"
+ also known as "Ідентифікаційний номер фізичної особи".
+ """
+ digits = []
+
+ # Number of days between 1899-12-31 and a birth date
+ for digit in str((DateTimeProvider.date_object() -
+ date(1899, 12, 31)).days):
+ digits.append(int(digit))
+
+ # Person's sequence number
+ for _ in range(4):
+ digits.append(cls.random_int(0, 9))
+
+ checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 +
+ digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 +
+ digits[8]*7)
+ # Remainder of a checksum divided by 11 or 1 if it equals to 10
+ digits.append(checksum % 11 % 10)
+
+ return ''.join(str(digit) for digit in digits)
|
b698f6925b4629d7473fbe42806f54068d98428a
|
tests/component/test_component_identidock.py
|
tests/component/test_component_identidock.py
|
import sys
print(sys.path)
|
import pytest
import requests
from time import sleep
COMPONENT_INDEX_URL = "http://identidock:5000"
COMPONENT_MONSTER_BASE_URL = COMPONENT_INDEX_URL + '/monster'
def test_get_mainpage():
    """GET / returns 200 and renders the default name."""
    print('component tester sleeping for 1 sec to let the identidock app to be ready adn also start its server')
    sleep(1)
    page = requests.get(COMPONENT_INDEX_URL)
    assert page.status_code == 200
    assert 'Joe Bloggs' in str(page.text)


def test_post_mainpage():
    """POST / echoes the submitted name."""
    page = requests.post(COMPONENT_INDEX_URL, data=dict(name="Moby Dock"))
    assert page.status_code == 200
    assert 'Moby Dock' in str(page.text)


def test_mainpage_html_escaping():
    """Submitted names must be HTML-escaped in the response."""
    page = requests.post(COMPONENT_INDEX_URL, data=dict(name='"><b>TEST</b><!--'))
    assert page.status_code == 200
    assert '<b>' not in str(page.text)


def test_get_identicon_with_valid_name_and_invalid_post_method_should_return_405():
    """POST is not allowed on the monster endpoint."""
    name_hash = 'ABCDEF123456789'
    page = requests.post('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
    assert page.status_code == 405


def test_get_identicon_with_valid_name_and_cache_miss():
    """First GET for a hash (expected cache miss) returns 200."""
    name_hash = 'ABCDEF123456789'
    page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
    # print('page.content : {0}'.format(page.content))
    assert page.status_code == 200


def test_get_identicon_with_valid_name_and_cache_hit():
    """Repeat GET for the same hash (expected cache hit) returns 200."""
    name_hash = 'ABCDEF123456789'
    page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
    # print('page.content : {0}'.format(page.content))
    assert page.status_code == 200


def test_get_identicon_with_insecure_and_unescaped_invalid_name_hash():
    """Invalid characters in the hash are currently accepted (200)."""
    invalid_name_hash = '<b>;i_am_invalid|name <{"'
    page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, invalid_name_hash))
    # print('page.content : {0}'.format(page.content))
    assert page.status_code == 200


if __name__ == '__main__':
    # unittest.main()
    pytest.main()
|
Add component test functions using pytest
|
Add component test functions using pytest
|
Python
|
mit
|
anirbanroydas/ci-testing-python,anirbanroydas/ci-testing-python,anirbanroydas/ci-testing-python
|
---
+++
@@ -1,3 +1,80 @@
-import sys
+import pytest
+import requests
+from time import sleep
-print(sys.path)
+COMPONENT_INDEX_URL = "http://identidock:5000"
+
+COMPONENT_MONSTER_BASE_URL = COMPONENT_INDEX_URL + '/monster'
+
+
+
+
+def test_get_mainpage():
+ print('component tester sleeping for 1 sec to let the identidock app to be ready adn also start its server')
+ sleep(1)
+ page = requests.get(COMPONENT_INDEX_URL)
+ assert page.status_code == 200
+ assert 'Joe Bloggs' in str(page.text)
+
+
+
+
+def test_post_mainpage():
+ page = requests.post(COMPONENT_INDEX_URL, data=dict(name="Moby Dock"))
+ assert page.status_code == 200
+ assert 'Moby Dock' in str(page.text)
+
+
+
+
+def test_mainpage_html_escaping():
+ page = requests.post(COMPONENT_INDEX_URL, data=dict(name='"><b>TEST</b><!--'))
+ assert page.status_code == 200
+ assert '<b>' not in str(page.text)
+
+
+
+def test_get_identicon_with_valid_name_and_invalid_post_method_should_return_405():
+ name_hash = 'ABCDEF123456789'
+
+ page = requests.post('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
+
+ assert page.status_code == 405
+
+
+
+
+def test_get_identicon_with_valid_name_and_cache_miss():
+ name_hash = 'ABCDEF123456789'
+ page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
+
+ # print('page.content : {0}'.format(page.content))
+ assert page.status_code == 200
+
+
+
+
+def test_get_identicon_with_valid_name_and_cache_hit():
+ name_hash = 'ABCDEF123456789'
+ page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
+
+ # print('page.content : {0}'.format(page.content))
+ assert page.status_code == 200
+
+
+
+
+def test_get_identicon_with_insecure_and_unescaped_invalid_name_hash():
+ invalid_name_hash = '<b>;i_am_invalid|name <{"'
+
+ page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, invalid_name_hash))
+
+ # print('page.content : {0}'.format(page.content))
+ assert page.status_code == 200
+
+
+
+
+if __name__ == '__main__':
+ # unittest.main()
+ pytest.main()
|
024ea3b2e9e373abdcd78e44a163a2c32345073f
|
unittests.py
|
unittests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
self.failUnless(self.udh.receivePublicKey(buf, lambda x: x) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
def callback( masterKey ):
self.failUnless(len(masterKey) == const.MASTER_KEY_LENGTH)
self.failUnless(self.udh.receivePublicKey(buf, callback) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
if __name__ == '__main__':
unittest.main()
|
Extend UniformDH test to also verify the length of the shared master secret.
|
Extend UniformDH test to also verify the length of the shared master secret.
|
Python
|
bsd-3-clause
|
isislovecruft/scramblesuit,isislovecruft/scramblesuit
|
---
+++
@@ -26,7 +26,10 @@
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
- self.failUnless(self.udh.receivePublicKey(buf, lambda x: x) == True)
+ def callback( masterKey ):
+ self.failUnless(len(masterKey) == const.MASTER_KEY_LENGTH)
+
+ self.failUnless(self.udh.receivePublicKey(buf, callback) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
|
8a9c4585d633e5c7858071fe1420850f930c7614
|
manage.py
|
manage.py
|
from flask_script import Manager
from flask_skeleton_api.main import app
import subprocess
import os
manager = Manager(app)
@manager.command
def unittest(report=False):
"""Run unit tests"""
if report:
subprocess.call(["py.test", "--junitxml=test-output/unit-test-output.xml",
"--cov-report=html:test-output/unit-test-cov-report"])
else:
subprocess.call(["py.test"])
@manager.command
def integrationtest(report=False):
"""Run integration tests"""
if report:
subprocess.call(["py.test", "--junitxml=test-output/integration-test-output.xml",
"integration_tests"])
else:
subprocess.call(["py.test", "integration_tests"])
@manager.command
def runserver(port=9998):
"""Run the app using flask server"""
os.environ["PYTHONUNBUFFERED"] = "yes"
os.environ["LOG_LEVEL"] = "DEBUG"
os.environ["COMMIT"] = "LOCAL"
app.run(debug=True, port=int(port))
if __name__ == "__main__":
manager.run()
|
from flask_script import Manager
from flask_skeleton_api.main import app
import subprocess
import os
# Using Alembic?
# See what extra lines are needed here:
# http://192.168.249.38/gadgets/gadget-api/blob/master/manage.py
manager = Manager(app)
@manager.command
def unittest(report=False):
"""Run unit tests"""
if report:
subprocess.call(["py.test", "--junitxml=test-output/unit-test-output.xml",
"--cov-report=html:test-output/unit-test-cov-report"])
else:
subprocess.call(["py.test"])
@manager.command
def integrationtest(report=False):
"""Run integration tests"""
if report:
subprocess.call(["py.test", "--junitxml=test-output/integration-test-output.xml",
"integration_tests"])
else:
subprocess.call(["py.test", "integration_tests"])
@manager.command
def runserver(port=9998):
"""Run the app using flask server"""
os.environ["PYTHONUNBUFFERED"] = "yes"
os.environ["LOG_LEVEL"] = "DEBUG"
os.environ["COMMIT"] = "LOCAL"
app.run(debug=True, port=int(port))
if __name__ == "__main__":
manager.run()
|
Add link to gadget for Alembic code
|
Add link to gadget for Alembic code
|
Python
|
mit
|
matthew-shaw/thing-api
|
---
+++
@@ -2,9 +2,11 @@
from flask_skeleton_api.main import app
import subprocess
import os
+# Using Alembic?
+# See what extra lines are needed here:
+# http://192.168.249.38/gadgets/gadget-api/blob/master/manage.py
manager = Manager(app)
-
@manager.command
def unittest(report=False):
|
61450328583cfb8e5ceee94a03502cef54bb11d6
|
learning_journal/tests/test_models.py
|
learning_journal/tests/test_models.py
|
# -*- coding: utf-8 -*-
from learning_journal.models import Entry, DBSession
def test_create_entry(dbtransaction):
"""Assert entry was entered into database."""
new_entry = Entry(title="Entry1", text="Hey, this works. Awesome.")
assert new_entry.id is None
DBSession.flush
assert new_entry.id is not None
|
# -*- coding: utf-8 -*-
from learning_journal.models import Entry, DBSession
def test_create_entry(dbtransaction):
"""Assert entry was entered into database."""
new_entry = Entry(title="Entry1", text="Hey, this works. Awesome.")
assert new_entry.id is None
DBSession.add(new_entry)
DBSession.flush
assert new_entry.id is not None
|
Modify test file, still doesn't work. Messed around in pshell.
|
Modify test file, still doesn't work. Messed around in pshell.
|
Python
|
mit
|
DZwell/learning_journal,DZwell/learning_journal,DZwell/learning_journal
|
---
+++
@@ -7,5 +7,6 @@
"""Assert entry was entered into database."""
new_entry = Entry(title="Entry1", text="Hey, this works. Awesome.")
assert new_entry.id is None
+ DBSession.add(new_entry)
DBSession.flush
assert new_entry.id is not None
|
e45f394c61620db13bae579a29043dfdd6ae2d0f
|
SLA_bot/alertfeed.py
|
SLA_bot/alertfeed.py
|
import asyncio
import json
import aiohttp
import SLA_bot.config as cf
class AlertFeed:
source_url = 'http://pso2emq.flyergo.eu/api/v2/'
async def download(url):
try:
async with aiohttp.get(url) as response:
return await response.json()
except json.decoder.JSONDecodeError:
pass
def parse_data(data):
latest_alert = data[0]['text']
lines = latest_alert.splitlines()
header = '-' * len(lines[0])
lines.insert(1, header)
text = '\n'.join(lines)
return '```fix\n{}\n```'.format(text)
async def fetch():
header = cf.get('PSO2 Feed', 'header')
raw_data = await AlertFeed.download(AlertFeed.source_url)
return '** **\n' + header + '\n' + AlertFeed.parse_data(raw_data)
|
import asyncio
import json
import aiohttp
import SLA_bot.config as cf
class AlertFeed:
source_url = 'http://pso2emq.flyergo.eu/api/v2/'
async def download(url):
try:
async with aiohttp.get(url) as response:
return await response.json()
except json.decoder.JSONDecodeError:
pass
def parse_data(data):
latest_alert = data[0]['text']
lines = latest_alert.splitlines()
code_color = 'fix' if len(lines) >= 10 else ''
header = '-' * len(lines[0])
lines.insert(1, header)
text = '\n'.join(lines)
return '```{}\n{}\n```'.format(code_color, text)
async def fetch():
header = cf.get('PSO2 Feed', 'header')
raw_data = await AlertFeed.download(AlertFeed.source_url)
return '** **\n' + header + '\n' + AlertFeed.parse_data(raw_data)
|
Remove text coloring in AlertFeed if it seems like scheduled text
|
Remove text coloring in AlertFeed if it seems like scheduled text
|
Python
|
mit
|
EsqWiggles/SLA-bot,EsqWiggles/SLA-bot
|
---
+++
@@ -18,10 +18,11 @@
def parse_data(data):
latest_alert = data[0]['text']
lines = latest_alert.splitlines()
+ code_color = 'fix' if len(lines) >= 10 else ''
header = '-' * len(lines[0])
lines.insert(1, header)
text = '\n'.join(lines)
- return '```fix\n{}\n```'.format(text)
+ return '```{}\n{}\n```'.format(code_color, text)
async def fetch():
header = cf.get('PSO2 Feed', 'header')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.