commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2d052b49151ea4fb8e0a422d8d743f49de593a04
|
filter_plugins/filterciscohash.py
|
filter_plugins/filterciscohash.py
|
#!/usr/bin/env python
import passlib.hash
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return passlib.hash.md5_crypt.using(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return str(passlib.hash.cisco_type7.hash(password))
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return str(passlib.hash.cisco_pix.hash(password, user))
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return str(passlib.hash.cisco_asa.hash(password, user))
|
#!/usr/bin/env python
import passlib.hash
# Version 1.7.0 introduced `passlib.hash.md5_crypt.using(salt_size=...)`.
try:
md5_crypt = passlib.hash.md5_crypt.using
except AttributeError:
md5_crypt = passlib.hash.md5_crypt
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return md5_crypt(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return passlib.hash.cisco_type7.hash(password)
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return passlib.hash.cisco_pix.hash(password, user)
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return passlib.hash.cisco_asa.hash(password, user)
|
Add support for Passlib versions prior to 1.7
|
Add support for Passlib versions prior to 1.7
|
Python
|
bsd-2-clause
|
mjuenema/ansible-filter-cisco-hash
|
#!/usr/bin/env python
import passlib.hash
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return passlib.hash.md5_crypt.using(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return str(passlib.hash.cisco_type7.hash(password))
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return str(passlib.hash.cisco_pix.hash(password, user))
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return str(passlib.hash.cisco_asa.hash(password, user))
Add support for Passlib versions prior to 1.7
|
#!/usr/bin/env python
import passlib.hash
# Version 1.7.0 introduced `passlib.hash.md5_crypt.using(salt_size=...)`.
try:
md5_crypt = passlib.hash.md5_crypt.using
except AttributeError:
md5_crypt = passlib.hash.md5_crypt
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return md5_crypt(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return passlib.hash.cisco_type7.hash(password)
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return passlib.hash.cisco_pix.hash(password, user)
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return passlib.hash.cisco_asa.hash(password, user)
|
<commit_before>#!/usr/bin/env python
import passlib.hash
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return passlib.hash.md5_crypt.using(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return str(passlib.hash.cisco_type7.hash(password))
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return str(passlib.hash.cisco_pix.hash(password, user))
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return str(passlib.hash.cisco_asa.hash(password, user))
<commit_msg>Add support for Passlib versions prior to 1.7<commit_after>
|
#!/usr/bin/env python
import passlib.hash
# Version 1.7.0 introduced `passlib.hash.md5_crypt.using(salt_size=...)`.
try:
md5_crypt = passlib.hash.md5_crypt.using
except AttributeError:
md5_crypt = passlib.hash.md5_crypt
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return md5_crypt(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return passlib.hash.cisco_type7.hash(password)
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return passlib.hash.cisco_pix.hash(password, user)
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return passlib.hash.cisco_asa.hash(password, user)
|
#!/usr/bin/env python
import passlib.hash
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return passlib.hash.md5_crypt.using(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return str(passlib.hash.cisco_type7.hash(password))
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return str(passlib.hash.cisco_pix.hash(password, user))
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return str(passlib.hash.cisco_asa.hash(password, user))
Add support for Passlib versions prior to 1.7#!/usr/bin/env python
import passlib.hash
# Version 1.7.0 introduced `passlib.hash.md5_crypt.using(salt_size=...)`.
try:
md5_crypt = passlib.hash.md5_crypt.using
except AttributeError:
md5_crypt = passlib.hash.md5_crypt
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return md5_crypt(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return passlib.hash.cisco_type7.hash(password)
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return passlib.hash.cisco_pix.hash(password, user)
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return passlib.hash.cisco_asa.hash(password, user)
|
<commit_before>#!/usr/bin/env python
import passlib.hash
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return passlib.hash.md5_crypt.using(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return str(passlib.hash.cisco_type7.hash(password))
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return str(passlib.hash.cisco_pix.hash(password, user))
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return str(passlib.hash.cisco_asa.hash(password, user))
<commit_msg>Add support for Passlib versions prior to 1.7<commit_after>#!/usr/bin/env python
import passlib.hash
# Version 1.7.0 introduced `passlib.hash.md5_crypt.using(salt_size=...)`.
try:
md5_crypt = passlib.hash.md5_crypt.using
except AttributeError:
md5_crypt = passlib.hash.md5_crypt
class FilterModule(object):
def filters(self):
return {
'ciscohash5': self.ciscohash5,
'ciscohash7': self.ciscohash7,
'ciscohashpix': self.ciscohashpix,
'ciscohashasa': self.ciscohashasa
}
def ciscohash5(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.md5_crypt.html"""
return md5_crypt(salt_size=4).hash(password)
def ciscohash7(self, password):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_type7.html"""
return passlib.hash.cisco_type7.hash(password)
def ciscohashpix(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_pix.html"""
return passlib.hash.cisco_pix.hash(password, user)
def ciscohashasa(self, password, user=''):
"""https://passlib.readthedocs.io/en/stable/lib/passlib.hash.cisco_asa.html"""
return passlib.hash.cisco_asa.hash(password, user)
|
2be2b71dbd3aba4d7aee2c54102eeac45252c5ed
|
drftutorial/catalog/views.py
|
drftutorial/catalog/views.py
|
from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
|
from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = ProductSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
Add POST method to ProductList class
|
Add POST method to ProductList class
|
Python
|
mit
|
andreagrandi/drf-tutorial
|
from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
Add POST method to ProductList class
|
from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = ProductSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
<commit_before>from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
<commit_msg>Add POST method to ProductList class<commit_after>
|
from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = ProductSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
Add POST method to ProductList classfrom django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = ProductSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
<commit_before>from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
<commit_msg>Add POST method to ProductList class<commit_after>from django.http import HttpResponse
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .models import Product
from .serializers import ProductSerializer
class ProductList(APIView):
def get(self, request, format=None):
products = Product.objects.all()
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
def post(self, request, format=None):
serializer = ProductSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
0f68b46ff94cb2a132f1d860c66998c17f1de3ef
|
ckanext/doi/tests/test_api.py
|
ckanext/doi/tests/test_api.py
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = constants.XML_DICT[u'identifier'][u'identifier']
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import time
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
time.sleep(10) # give datacite time to update
datacite_url = api.client.doi_get(doi)
nose.tools.assert_is_not_none(datacite_url)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
|
Test if the DOI was actually created on DataCite
|
Test if the DOI was actually created on DataCite
|
Python
|
mit
|
NaturalHistoryMuseum/ckanext-doi,NaturalHistoryMuseum/ckanext-doi,NaturalHistoryMuseum/ckanext-doi
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = constants.XML_DICT[u'identifier'][u'identifier']
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
Test if the DOI was actually created on DataCite
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import time
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
time.sleep(10) # give datacite time to update
datacite_url = api.client.doi_get(doi)
nose.tools.assert_is_not_none(datacite_url)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
|
<commit_before>#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = constants.XML_DICT[u'identifier'][u'identifier']
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
<commit_msg>Test if the DOI was actually created on DataCite<commit_after>
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import time
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
time.sleep(10) # give datacite time to update
datacite_url = api.client.doi_get(doi)
nose.tools.assert_is_not_none(datacite_url)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = constants.XML_DICT[u'identifier'][u'identifier']
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
Test if the DOI was actually created on DataCite#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import time
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
time.sleep(10) # give datacite time to update
datacite_url = api.client.doi_get(doi)
nose.tools.assert_is_not_none(datacite_url)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
|
<commit_before>#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = constants.XML_DICT[u'identifier'][u'identifier']
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
<commit_msg>Test if the DOI was actually created on DataCite<commit_after>#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-doi
# Created by the Natural History Museum in London, UK
import time
import nose
from ckanext.doi.lib.api import DataciteClient
from ckantest.models import TestBase
from datacite.errors import DataCiteError
import constants
class TestAPI(TestBase):
plugins = [u'doi']
persist = {
u'ckanext.doi.debug': True
}
def test_generate_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
nose.tools.assert_is_instance(doi, (str, unicode))
def test_mint_new_doi(self):
api = DataciteClient()
doi = api.generate_doi()
pkg_id = u'abcd1234'
with nose.tools.assert_raises(DataCiteError):
api.mint_doi(doi, pkg_id)
api.set_metadata(doi, constants.XML_DICT)
api.mint_doi(doi, pkg_id)
time.sleep(10) # give datacite time to update
datacite_url = api.client.doi_get(doi)
nose.tools.assert_is_not_none(datacite_url)
def test_datacite_authentication(self):
api = DataciteClient()
nose.tools.assert_is_not_none(api.client)
|
cbfa5d916585524212193f476db4affa38eed5a8
|
pythymiodw/pyro/__init__.py
|
pythymiodw/pyro/__init__.py
|
import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
|
import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
import time
from pythymiodw.io import ProxGround
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
time.sleep(2)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
def sleep(self, sec):
time.sleep(sec)
@property
def prox_horizontal(self):
return self.robot.prox_horizontal
@property
def prox_ground(self):
delta, ambiant, reflected = self.robot.prox_ground
return ProxGround(delta, ambiant, reflected)
|
Add sleep, and prox_ground, prox_horizontal.
|
Add sleep, and prox_ground, prox_horizontal.
|
Python
|
mit
|
kurniawano/pythymiodw
|
import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
Add sleep, and prox_ground, prox_horizontal.
|
import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
import time
from pythymiodw.io import ProxGround
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
time.sleep(2)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
def sleep(self, sec):
time.sleep(sec)
@property
def prox_horizontal(self):
return self.robot.prox_horizontal
@property
def prox_ground(self):
delta, ambiant, reflected = self.robot.prox_ground
return ProxGround(delta, ambiant, reflected)
|
<commit_before>import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
<commit_msg>Add sleep, and prox_ground, prox_horizontal.<commit_after>
|
import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
import time
from pythymiodw.io import ProxGround
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
time.sleep(2)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
def sleep(self, sec):
time.sleep(sec)
@property
def prox_horizontal(self):
return self.robot.prox_horizontal
@property
def prox_ground(self):
delta, ambiant, reflected = self.robot.prox_ground
return ProxGround(delta, ambiant, reflected)
|
import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
Add sleep, and prox_ground, prox_horizontal.import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
import time
from pythymiodw.io import ProxGround
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
time.sleep(2)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
def sleep(self, sec):
time.sleep(sec)
@property
def prox_horizontal(self):
return self.robot.prox_horizontal
@property
def prox_ground(self):
delta, ambiant, reflected = self.robot.prox_ground
return ProxGround(delta, ambiant, reflected)
|
<commit_before>import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
<commit_msg>Add sleep, and prox_ground, prox_horizontal.<commit_after>import os
import Pyro4
import subprocess
import signal
from pythymiodw import ThymioSimMR
import time
from pythymiodw.io import ProxGround
class ThymioMR():
def __init__(self):
self.pyro4daemon_proc=subprocess.Popen(['python -m pythymiodw.pyro.__main__'], stdout=subprocess.PIPE, shell=True, preexec_fn=os.setsid)
time.sleep(2)
self.robot = Pyro4.Proxy('PYRONAME:pythymiodw.thymiosimmr')
def quit(self):
self.robot.quit()
os.killpg(os.getpgid(self.pyro4daemon_proc.pid), signal.SIGTERM)
def wheels(self, lv, rv):
self.robot.wheels(lv, rv)
def get_wheels(self):
return self.robot.leftv, self.robot.rightv
def sleep(self, sec):
time.sleep(sec)
@property
def prox_horizontal(self):
return self.robot.prox_horizontal
@property
def prox_ground(self):
delta, ambiant, reflected = self.robot.prox_ground
return ProxGround(delta, ambiant, reflected)
|
bc7c389ac00348792fd3346a454704cdf9f48416
|
django_lightweight_queue/backends/redis.py
|
django_lightweight_queue/backends/redis.py
|
from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
data = self.client.blpop(self.KEY, timeout)
if data is not None:
return Job.from_json(data)
|
from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
try:
key, data = self.client.blpop(self.KEY, timeout)
return Job.from_json(data)
except TypeError:
pass
|
Fix Redis backend; BLPOP takes multiple lists.
|
Fix Redis backend; BLPOP takes multiple lists.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
Python
|
bsd-3-clause
|
thread/django-lightweight-queue,thread/django-lightweight-queue,prophile/django-lightweight-queue,lamby/django-lightweight-queue,prophile/django-lightweight-queue
|
from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
data = self.client.blpop(self.KEY, timeout)
if data is not None:
return Job.from_json(data)
Fix Redis backend; BLPOP takes multiple lists.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
try:
key, data = self.client.blpop(self.KEY, timeout)
return Job.from_json(data)
except TypeError:
pass
|
<commit_before>from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
data = self.client.blpop(self.KEY, timeout)
if data is not None:
return Job.from_json(data)
<commit_msg>Fix Redis backend; BLPOP takes multiple lists.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>
|
from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
try:
key, data = self.client.blpop(self.KEY, timeout)
return Job.from_json(data)
except TypeError:
pass
|
from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
data = self.client.blpop(self.KEY, timeout)
if data is not None:
return Job.from_json(data)
Fix Redis backend; BLPOP takes multiple lists.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
try:
key, data = self.client.blpop(self.KEY, timeout)
return Job.from_json(data)
except TypeError:
pass
|
<commit_before>from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
data = self.client.blpop(self.KEY, timeout)
if data is not None:
return Job.from_json(data)
<commit_msg>Fix Redis backend; BLPOP takes multiple lists.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>from __future__ import absolute_import # For 'redis'
import redis
from ..job import Job
from .. import app_settings
class RedisBackend(object):
KEY = 'django_lightweight_queue'
def __init__(self):
self.client = redis.Redis(
host=app_settings.REDIS_HOST,
port=app_settings.REDIS_PORT,
)
def enqueue(self, job):
self.client.rpush(self.KEY, job.to_json())
def dequeue(self, timeout):
try:
key, data = self.client.blpop(self.KEY, timeout)
return Job.from_json(data)
except TypeError:
pass
|
ab5edd504789e8fad3dcf0f30b0fbec8608e2abe
|
django_nyt/urls.py
|
django_nyt/urls.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
'', url('^json/get/$', 'django_nyt.views.get_notifications',
name='json_get'), url('^json/get/(?P<latest_id>\d+)/$',
'django_nyt.views.get_notifications', name='json_get'), url(
'^json/mark-read/$', 'django_nyt.views.mark_read',
name='json_mark_read_base'), url('^json/mark-read/(\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.conf.urls import url
urlpatterns = [
url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'),
url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),
]
if DJANGO_VERSION < (1, 8):
from django.conf.urls import patterns
urlpatterns = patterns('', *urlpatterns)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
Use list instead of patterns()
|
Use list instead of patterns()
|
Python
|
apache-2.0
|
benjaoming/django-nyt,benjaoming/django-nyt
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
'', url('^json/get/$', 'django_nyt.views.get_notifications',
name='json_get'), url('^json/get/(?P<latest_id>\d+)/$',
'django_nyt.views.get_notifications', name='json_get'), url(
'^json/mark-read/$', 'django_nyt.views.mark_read',
name='json_mark_read_base'), url('^json/mark-read/(\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
Use list instead of patterns()
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.conf.urls import url
urlpatterns = [
url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'),
url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),
]
if DJANGO_VERSION < (1, 8):
from django.conf.urls import patterns
urlpatterns = patterns('', *urlpatterns)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
'', url('^json/get/$', 'django_nyt.views.get_notifications',
name='json_get'), url('^json/get/(?P<latest_id>\d+)/$',
'django_nyt.views.get_notifications', name='json_get'), url(
'^json/mark-read/$', 'django_nyt.views.mark_read',
name='json_mark_read_base'), url('^json/mark-read/(\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
<commit_msg>Use list instead of patterns()<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.conf.urls import url
urlpatterns = [
url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'),
url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),
]
if DJANGO_VERSION < (1, 8):
from django.conf.urls import patterns
urlpatterns = patterns('', *urlpatterns)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
'', url('^json/get/$', 'django_nyt.views.get_notifications',
name='json_get'), url('^json/get/(?P<latest_id>\d+)/$',
'django_nyt.views.get_notifications', name='json_get'), url(
'^json/mark-read/$', 'django_nyt.views.mark_read',
name='json_mark_read_base'), url('^json/mark-read/(\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
Use list instead of patterns()# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.conf.urls import url
urlpatterns = [
url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'),
url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),
]
if DJANGO_VERSION < (1, 8):
from django.conf.urls import patterns
urlpatterns = patterns('', *urlpatterns)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
'', url('^json/get/$', 'django_nyt.views.get_notifications',
name='json_get'), url('^json/get/(?P<latest_id>\d+)/$',
'django_nyt.views.get_notifications', name='json_get'), url(
'^json/mark-read/$', 'django_nyt.views.mark_read',
name='json_mark_read_base'), url('^json/mark-read/(\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$',
'django_nyt.views.mark_read', name='json_mark_read'), url(
'^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
<commit_msg>Use list instead of patterns()<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.conf.urls import url
urlpatterns = [
url('^json/get/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/get/(?P<latest_id>\d+)/$', 'django_nyt.views.get_notifications', name='json_get'),
url('^json/mark-read/$', 'django_nyt.views.mark_read', name='json_mark_read_base'),
url('^json/mark-read/(\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^json/mark-read/(?P<id_lte>\d+)/(?P<id_gte>\d+)/$', 'django_nyt.views.mark_read', name='json_mark_read'),
url('^goto/(?P<notification_id>\d+)/$', 'django_nyt.views.goto', name='goto'),
url('^goto/$', 'django_nyt.views.goto', name='goto_base'),
]
if DJANGO_VERSION < (1, 8):
from django.conf.urls import patterns
urlpatterns = patterns('', *urlpatterns)
def get_pattern(app_name="nyt", namespace="nyt"):
"""Every url resolution takes place as "nyt:view_name".
https://docs.djangoproject.com/en/dev/topics/http/urls/#topics-http-reversing-url-namespaces
"""
return urlpatterns, app_name, namespace
|
cc9bf099823afbcaa98836dcb93c78124edaed5c
|
pydevd_attach_to_process/attach_script.py
|
pydevd_attach_to_process/attach_script.py
|
def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()
|
def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
stdoutToServer=True,
stderrToServer=True,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()
|
Send output to server by default for attach to process debug.
|
Send output to server by default for attach to process debug.
|
Python
|
epl-1.0
|
fabioz/PyDev.Debugger,Elizaveta239/PyDev.Debugger,simudream/PyDev.Debugger,SylvainCorlay/PyDev.Debugger,Elizaveta239/PyDev.Debugger,Elizaveta239/PyDev.Debugger,simudream/PyDev.Debugger,fabioz/PyDev.Debugger,SylvainCorlay/PyDev.Debugger,simudream/PyDev.Debugger,fabioz/PyDev.Debugger,Elizaveta239/PyDev.Debugger,Elizaveta239/PyDev.Debugger,SylvainCorlay/PyDev.Debugger,fabioz/PyDev.Debugger,SylvainCorlay/PyDev.Debugger,simudream/PyDev.Debugger,fabioz/PyDev.Debugger
|
def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()Send output to server by default for attach to process debug.
|
def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
stdoutToServer=True,
stderrToServer=True,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()
|
<commit_before>def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()<commit_msg>Send output to server by default for attach to process debug.<commit_after>
|
def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
stdoutToServer=True,
stderrToServer=True,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()
|
def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()Send output to server by default for attach to process debug.def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
stdoutToServer=True,
stderrToServer=True,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()
|
<commit_before>def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()<commit_msg>Send output to server by default for attach to process debug.<commit_after>def attach(port, host):
try:
import pydevd
pydevd.stoptrace() #I.e.: disconnect if already connected
# pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True
# pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3
# pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
pydevd.settrace(
port=port,
host=host,
stdoutToServer=True,
stderrToServer=True,
overwrite_prev_trace=True,
suspend=False,
trace_only_current_thread=False,
patch_multiprocessing=False,
)
except:
import traceback;traceback.print_exc()
|
f6f6ae78a865a3c8bcb28c16157168470264e59b
|
tasks.py
|
tasks.py
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests, count_errors
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, count_errors, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
|
Use new invocations error counter for testing thread sleeps etc
|
Use new invocations error counter for testing thread sleeps etc
|
Python
|
bsd-2-clause
|
fabric/fabric
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
Use new invocations error counter for testing thread sleeps etc
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests, count_errors
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, count_errors, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
|
<commit_before>from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
<commit_msg>Use new invocations error counter for testing thread sleeps etc<commit_after>
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests, count_errors
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, count_errors, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
|
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
Use new invocations error counter for testing thread sleeps etcfrom invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests, count_errors
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, count_errors, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
|
<commit_before>from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
<commit_msg>Use new invocations error counter for testing thread sleeps etc<commit_after>from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, integration, coverage, watch_tests, count_errors
from invocations import packaging
from invoke import Collection
from invoke.util import LOG_FORMAT
ns = Collection(
docs, www, test, coverage, integration, sites, watch_docs,
watch_tests, count_errors, release=packaging,
)
ns.configure({
'tests': {
'package': 'fabric',
'logformat': LOG_FORMAT,
}
})
|
6d37f54983c5fa4c9b9087bef801d78cb38daeb6
|
tasks.py
|
tasks.py
|
import invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel{0}* '.format(version))
|
import invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel-{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel-{0}* '.format(version))
|
Fix invoke to fix package finding
|
Fix invoke to fix package finding
|
Python
|
bsd-3-clause
|
python-babel/django-babel,graingert/django-babel
|
import invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel{0}* '.format(version))
Fix invoke to fix package finding
|
import invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel-{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel-{0}* '.format(version))
|
<commit_before>import invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel{0}* '.format(version))
<commit_msg>Fix invoke to fix package finding<commit_after>
|
import invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel-{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel-{0}* '.format(version))
|
import invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel{0}* '.format(version))
Fix invoke to fix package findingimport invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel-{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel-{0}* '.format(version))
|
<commit_before>import invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel{0}* '.format(version))
<commit_msg>Fix invoke to fix package finding<commit_after>import invoke
@invoke.task
def release(version):
"""`version`` should be a string like '0.4' or '1.0'."""
invoke.run('git tag -s {0}'.format(version))
invoke.run('git push --tags')
invoke.run('python setup.py sdist')
invoke.run('python setup.py bdist_wheel')
invoke.run('twine upload -s dist/django-babel-{0}* '.format(version))
invoke.run('twine upload -s dist/django_babel-{0}* '.format(version))
|
847143cd60986c6558167a8ad28a778b09330a7c
|
gocd/server.py
|
gocd/server.py
|
import urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(
realm='Cruise', # This seems to be hard coded.
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
|
import urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler(
urllib2.HTTPPasswordMgrWithDefaultRealm()
)
auth_handler.add_password(
realm=None,
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
|
Remove hard coded realm and assume any is fine
|
Remove hard coded realm and assume any is fine
|
Python
|
mit
|
henriquegemignani/py-gocd,gaqzi/py-gocd
|
import urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(
realm='Cruise', # This seems to be hard coded.
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
Remove hard coded realm and assume any is fine
|
import urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler(
urllib2.HTTPPasswordMgrWithDefaultRealm()
)
auth_handler.add_password(
realm=None,
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
|
<commit_before>import urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(
realm='Cruise', # This seems to be hard coded.
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
<commit_msg>Remove hard coded realm and assume any is fine<commit_after>
|
import urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler(
urllib2.HTTPPasswordMgrWithDefaultRealm()
)
auth_handler.add_password(
realm=None,
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
|
import urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(
realm='Cruise', # This seems to be hard coded.
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
Remove hard coded realm and assume any is fineimport urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler(
urllib2.HTTPPasswordMgrWithDefaultRealm()
)
auth_handler.add_password(
realm=None,
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
|
<commit_before>import urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(
realm='Cruise', # This seems to be hard coded.
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
<commit_msg>Remove hard coded realm and assume any is fine<commit_after>import urllib2
from urlparse import urljoin
from gocd.api import Pipeline
class Server(object):
def __init__(self, host, user=None, password=None):
self.host = host
self.user = user
self.password = password
if self.user and self.password:
self._add_basic_auth()
def get(self, path):
return urllib2.urlopen(self._request(path))
def pipeline(self, name):
return Pipeline(self, name)
def _add_basic_auth(self):
auth_handler = urllib2.HTTPBasicAuthHandler(
urllib2.HTTPPasswordMgrWithDefaultRealm()
)
auth_handler.add_password(
realm=None,
uri=self.host,
user=self.user,
passwd=self.password,
)
urllib2.install_opener(urllib2.build_opener(auth_handler))
def _request(self, path, data=None, headers=None):
default_headers = {
'User-Agent': 'py-gocd',
}
default_headers.update(headers or {})
return urllib2.Request(
self._url(path),
data=data,
headers=default_headers
)
def _url(self, path):
return urljoin(self.host, path)
|
9088d706e08317241ab8238020780d6140507589
|
colour/adaptation/__init__.py
|
colour/adaptation/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = []
__all__ += dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
|
Fix various documentation related warnings.
|
Fix various documentation related warnings.
|
Python
|
bsd-3-clause
|
colour-science/colour
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
Fix various documentation related warnings.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = []
__all__ += dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
<commit_msg>Fix various documentation related warnings.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = []
__all__ += dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
Fix various documentation related warnings.#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = []
__all__ += dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
<commit_msg>Fix various documentation related warnings.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .dataset import *
from . import dataset
from .vonkries import (
chromatic_adaptation_matrix_VonKries,
chromatic_adaptation_VonKries)
from .fairchild1990 import chromatic_adaptation_Fairchild1990
from .cmccat2000 import (
CMCCAT2000_InductionFactors,
CMCCAT2000_VIEWING_CONDITIONS,
CMCCAT2000_forward,
CMCCAT2000_reverse,
chromatic_adaptation_CMCCAT2000)
from .cie1994 import chromatic_adaptation_CIE1994
__all__ = []
__all__ += dataset.__all__
__all__ += ['chromatic_adaptation_matrix_VonKries',
'chromatic_adaptation_VonKries']
__all__ += ['chromatic_adaptation_Fairchild1990']
__all__ += ['CMCCAT2000_InductionFactors',
'CMCCAT2000_VIEWING_CONDITIONS',
'CMCCAT2000_forward',
'CMCCAT2000_reverse',
'chromatic_adaptation_CMCCAT2000']
__all__ += ['chromatic_adaptation_CIE1994']
|
56661432ea78f193346fe8bcf33bd19a2e1787bc
|
tests/test_manager.py
|
tests/test_manager.py
|
def test_ensure_authority(manager_transaction):
authority = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority.name == 'Test Authority'
assert authority.rank == 0
assert authority.cardinality == 1234
|
def test_ensure_authority(manager_transaction):
authority1 = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority1.name == 'Test Authority'
assert authority1.rank == 0
assert authority1.cardinality == 1234
authority2 = manager_transaction.ensure_authority(
name='Test Authority',
rank=1,
cardinality=2345
)
assert authority1 is authority2
assert authority2.name == 'Test Authority'
assert authority2.rank == 1
assert authority2.cardinality == 2345
|
Test ensure_authority for both nonexistent and already existing Authority records.
|
Test ensure_authority for both nonexistent and already existing Authority records.
|
Python
|
mit
|
scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash
|
def test_ensure_authority(manager_transaction):
authority = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority.name == 'Test Authority'
assert authority.rank == 0
assert authority.cardinality == 1234
Test ensure_authority for both nonexistent and already existing Authority records.
|
def test_ensure_authority(manager_transaction):
authority1 = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority1.name == 'Test Authority'
assert authority1.rank == 0
assert authority1.cardinality == 1234
authority2 = manager_transaction.ensure_authority(
name='Test Authority',
rank=1,
cardinality=2345
)
assert authority1 is authority2
assert authority2.name == 'Test Authority'
assert authority2.rank == 1
assert authority2.cardinality == 2345
|
<commit_before>def test_ensure_authority(manager_transaction):
authority = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority.name == 'Test Authority'
assert authority.rank == 0
assert authority.cardinality == 1234
<commit_msg>Test ensure_authority for both nonexistent and already existing Authority records.<commit_after>
|
def test_ensure_authority(manager_transaction):
authority1 = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority1.name == 'Test Authority'
assert authority1.rank == 0
assert authority1.cardinality == 1234
authority2 = manager_transaction.ensure_authority(
name='Test Authority',
rank=1,
cardinality=2345
)
assert authority1 is authority2
assert authority2.name == 'Test Authority'
assert authority2.rank == 1
assert authority2.cardinality == 2345
|
def test_ensure_authority(manager_transaction):
authority = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority.name == 'Test Authority'
assert authority.rank == 0
assert authority.cardinality == 1234
Test ensure_authority for both nonexistent and already existing Authority records.def test_ensure_authority(manager_transaction):
authority1 = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority1.name == 'Test Authority'
assert authority1.rank == 0
assert authority1.cardinality == 1234
authority2 = manager_transaction.ensure_authority(
name='Test Authority',
rank=1,
cardinality=2345
)
assert authority1 is authority2
assert authority2.name == 'Test Authority'
assert authority2.rank == 1
assert authority2.cardinality == 2345
|
<commit_before>def test_ensure_authority(manager_transaction):
authority = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority.name == 'Test Authority'
assert authority.rank == 0
assert authority.cardinality == 1234
<commit_msg>Test ensure_authority for both nonexistent and already existing Authority records.<commit_after>def test_ensure_authority(manager_transaction):
authority1 = manager_transaction.ensure_authority(
name='Test Authority',
rank=0,
cardinality=1234
)
assert authority1.name == 'Test Authority'
assert authority1.rank == 0
assert authority1.cardinality == 1234
authority2 = manager_transaction.ensure_authority(
name='Test Authority',
rank=1,
cardinality=2345
)
assert authority1 is authority2
assert authority2.name == 'Test Authority'
assert authority2.rank == 1
assert authority2.cardinality == 2345
|
a5ca04fd5e7498276f9c7683cb4c4b059d7852ac
|
examples/test-mh/policies/participant_3.py
|
examples/test-mh/policies/participant_3.py
|
{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": '08:00:27:89:3b:9f'
},
"action":
{
"drop": 0
}
}
]
}
|
{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": "08:00:27:89:3b:9f"
},
"action":
{
"drop": 0
}
}
]
}
|
Add double quotes for MAC address
|
Add double quotes for MAC address
|
Python
|
apache-2.0
|
h2020-endeavour/endeavour,h2020-endeavour/endeavour
|
{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": '08:00:27:89:3b:9f'
},
"action":
{
"drop": 0
}
}
]
}Add double quotes for MAC address
|
{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": "08:00:27:89:3b:9f"
},
"action":
{
"drop": 0
}
}
]
}
|
<commit_before>{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": '08:00:27:89:3b:9f'
},
"action":
{
"drop": 0
}
}
]
}<commit_msg>Add double quotes for MAC address<commit_after>
|
{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": "08:00:27:89:3b:9f"
},
"action":
{
"drop": 0
}
}
]
}
|
{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": '08:00:27:89:3b:9f'
},
"action":
{
"drop": 0
}
}
]
}Add double quotes for MAC address{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": "08:00:27:89:3b:9f"
},
"action":
{
"drop": 0
}
}
]
}
|
<commit_before>{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": '08:00:27:89:3b:9f'
},
"action":
{
"drop": 0
}
}
]
}<commit_msg>Add double quotes for MAC address<commit_after>{
"inbound": [
{
"cookie": 1,
"match":
{
"tcp_dst": 4321
},
"action":
{
"fwd": 0
}
},
{
"cookie": 2,
"match":
{
"tcp_dst": 4322
},
"action":
{
"fwd": 1
}
},
{
"cookie": 3,
"match":
{
"eth_src": "08:00:27:89:3b:9f"
},
"action":
{
"drop": 0
}
}
]
}
|
462fafa047b05d0e11b9a730ecfb8e1be9dc675a
|
pattern_matcher/regex.py
|
pattern_matcher/regex.py
|
import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
def create(self, path):
pattern = self._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
|
import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
@classmethod
def create(cls, path):
rf = cls()
pattern = rf._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
|
Make create a class method.
|
Make create a class method.
|
Python
|
mit
|
damonkelley/pattern-matcher
|
import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
def create(self, path):
pattern = self._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
Make create a class method.
|
import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
@classmethod
def create(cls, path):
rf = cls()
pattern = rf._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
|
<commit_before>import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
def create(self, path):
pattern = self._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
<commit_msg>Make create a class method.<commit_after>
|
import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
@classmethod
def create(cls, path):
rf = cls()
pattern = rf._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
|
import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
def create(self, path):
pattern = self._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
Make create a class method.import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
@classmethod
def create(cls, path):
rf = cls()
pattern = rf._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
|
<commit_before>import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
def create(self, path):
pattern = self._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
<commit_msg>Make create a class method.<commit_after>import re
class RegexFactory(object):
"""Generates a regex pattern."""
WORD_GROUP = '({0}|\*)'
SEP = '/'
def _generate_pattern(self, path):
"""Generates a regex pattern."""
# Split the path up into a list using the forward slash as a
# delimiter.
words = (word for word in path.split(self.SEP) if word)
# Compose a list of regular expression groups for each word in
# the path.
patterns = (self.WORD_GROUP.format(re.escape(word)) for word in words)
# Implode the list into a single regex pattern that will match
# the path pattern format.
return '^{0}$'.format(('\,').join(patterns))
@classmethod
def create(cls, path):
rf = cls()
pattern = rf._generate_pattern(path)
return re.compile(pattern, re.ASCII | re.MULTILINE)
|
0e471ed468c0f43f8f8b562ca9cc7d44869f53b2
|
hermes/views.py
|
hermes/views.py
|
from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
|
from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('-created_on')
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
|
Order posts by the creation date
|
Order posts by the creation date
|
Python
|
mit
|
emilian/django-hermes
|
from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
Order posts by the creation date
|
from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('-created_on')
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
|
<commit_before>from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
<commit_msg>Order posts by the creation date<commit_after>
|
from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('-created_on')
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
|
from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
Order posts by the creation datefrom django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('-created_on')
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
|
<commit_before>from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
<commit_msg>Order posts by the creation date<commit_after>from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('-created_on')
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
|
419ca24e9f6b8dc0599a79ed93c535e5d31f46ba
|
version.py
|
version.py
|
major = 0
minor=0
patch=17
branch="master"
timestamp=1376509025.95
|
major = 0
minor=0
patch=18
branch="master"
timestamp=1376526164.38
|
Tag commit for v0.0.18-master generated by gitmake.py
|
Tag commit for v0.0.18-master generated by gitmake.py
|
Python
|
mit
|
ryansturmer/gitmake
|
major = 0
minor=0
patch=17
branch="master"
timestamp=1376509025.95Tag commit for v0.0.18-master generated by gitmake.py
|
major = 0
minor=0
patch=18
branch="master"
timestamp=1376526164.38
|
<commit_before>major = 0
minor=0
patch=17
branch="master"
timestamp=1376509025.95<commit_msg>Tag commit for v0.0.18-master generated by gitmake.py<commit_after>
|
major = 0
minor=0
patch=18
branch="master"
timestamp=1376526164.38
|
major = 0
minor=0
patch=17
branch="master"
timestamp=1376509025.95Tag commit for v0.0.18-master generated by gitmake.pymajor = 0
minor=0
patch=18
branch="master"
timestamp=1376526164.38
|
<commit_before>major = 0
minor=0
patch=17
branch="master"
timestamp=1376509025.95<commit_msg>Tag commit for v0.0.18-master generated by gitmake.py<commit_after>major = 0
minor=0
patch=18
branch="master"
timestamp=1376526164.38
|
9ab6ebf1b6610d1d5917bd542abefd0cd34dc532
|
labonneboite/common/email_util.py
|
labonneboite/common/email_util.py
|
# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content)
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
|
# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
|
Fix TypeError on mandrill email sending
|
Fix TypeError on mandrill email sending
json.loads() does not accept bytes as input in python < 3.6.
|
Python
|
agpl-3.0
|
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
|
# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content)
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
Fix TypeError on mandrill email sending
json.loads() does not accept bytes as input in python < 3.6.
|
# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
|
<commit_before># coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content)
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
<commit_msg>Fix TypeError on mandrill email sending
json.loads() does not accept bytes as input in python < 3.6.<commit_after>
|
# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
|
# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content)
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
Fix TypeError on mandrill email sending
json.loads() does not accept bytes as input in python < 3.6.# coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
|
<commit_before># coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content)
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
<commit_msg>Fix TypeError on mandrill email sending
json.loads() does not accept bytes as input in python < 3.6.<commit_after># coding: utf8
import json
import logging
from labonneboite.conf import settings
logger = logging.getLogger('main')
class EmailClient(object):
to = settings.FORM_EMAIL
from_email = settings.ADMIN_EMAIL
subject = 'nouveau message entreprise LBB'
class MandrillClient(EmailClient):
def __init__(self, mandrill):
self.mandrill = mandrill
def send(self, html):
from_email = self.from_email
to_email = self.to
response = self.mandrill.send_email(
subject=self.subject,
to=[{'email': to_email}],
html=html,
from_email=from_email)
content = json.loads(response.content.decode())
if content[0]["status"] != "sent":
raise Exception("email was not sent from %s to %s" % (from_email, to_email))
return response
|
d9c2a7112ba239fb64ecc76ce844caed9146a5dc
|
nova/db/sqlalchemy/migrate_repo/versions/023_add_vm_mode_to_instances.py
|
nova/db/sqlalchemy/migrate_repo/versions/023_add_vm_mode_to_instances.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta,
Column('id', Integer(), primary_key=True, nullable=False),
)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta, autoload=True)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
|
Load table schema automatically instead of stubbing out
|
Load table schema automatically instead of stubbing out
|
Python
|
apache-2.0
|
barnsnake351/nova,NeCTAR-RC/nova,zaina/nova,NewpTone/stacklab-nova,russellb/nova,Juniper/nova,tangfeixiong/nova,JioCloud/nova,affo/nova,NewpTone/stacklab-nova,gspilio/nova,maoy/zknova,SUSE-Cloud/nova,josephsuh/extra-specs,vmturbo/nova,usc-isi/nova,russellb/nova,psiwczak/openstack,ewindisch/nova,TieWei/nova,dstroppa/openstack-smartos-nova-grizzly,cloudbau/nova,vmturbo/nova,imsplitbit/nova,rajalokan/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,vmturbo/nova,TwinkleChawla/nova,DirectXMan12/nova-hacking,CloudServer/nova,psiwczak/openstack,shail2810/nova,cloudbase/nova-virtualbox,houshengbo/nova_vmware_compute_driver,varunarya10/nova_test_latest,viggates/nova,JianyuWang/nova,tianweizhang/nova,gooddata/openstack-nova,alexandrucoman/vbox-nova-driver,silenceli/nova,rajalokan/nova,scripnichenko/nova,tianweizhang/nova,zzicewind/nova,eharney/nova,orbitfp7/nova,iuliat/nova,gooddata/openstack-nova,zhimin711/nova,isyippee/nova,ted-gould/nova,apporc/nova,Triv90/Nova,luogangyi/bcec-nova,klmitch/nova,akash1808/nova,cernops/nova,shahar-stratoscale/nova,eonpatapon/nova,phenoxim/nova,MountainWei/nova,virtualopensystems/nova,hanlind/nova,maheshp/novatest,sridevikoushik31/openstack,Stavitsky/nova,usc-isi/extra-specs,devendermishrajio/nova_test_latest,alvarolopez/nova,vladikr/nova_drafts,NoBodyCam/TftpPxeBootBareMetal,mikalstill/nova,angdraug/nova,leilihh/nova,rrader/nova-docker-plugin,alexandrucoman/vbox-nova-driver,projectcalico/calico-nova,eayunstack/nova,tudorvio/nova,isyippee/nova,yosshy/nova,mmnelemane/nova,openstack/nova,fajoy/nova,sebrandon1/nova,usc-isi/extra-specs,badock/nova,qwefi/nova,devendermishrajio/nova,LoHChina/nova,scripnichenko/nova,berrange/nova,openstack/nova,psiwczak/openstack,houshengbo/nova_vmware_compute_driver,Yusuke1987/openstack_template,Metaswitch/calico-nova,badock/nova,savi-dev/nova,maelnor/nova,ntt-sic/nova,jianghuaw/nova,watonyweng/nova,Tehsmash/nova,mgagne/nova,mahak/nova,bgxavier/nova,tangfeixiong/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,whitepages/nova
,mandeepdhami/nova,NeCTAR-RC/nova,affo/nova,ewindisch/nova,rickerc/nova_audit,eharney/nova,CloudServer/nova,Stavitsky/nova,devendermishrajio/nova_test_latest,cernops/nova,gspilio/nova,SUSE-Cloud/nova,nikesh-mahalka/nova,CCI-MOC/nova,petrutlucian94/nova,CiscoSystems/nova,hanlind/nova,maheshp/novatest,yrobla/nova,mahak/nova,cloudbase/nova,citrix-openstack-build/nova,qwefi/nova,virtualopensystems/nova,Yuriy-Leonov/nova,cloudbase/nova,blueboxgroup/nova,shootstar/novatest,josephsuh/extra-specs,houshengbo/nova_vmware_compute_driver,mgagne/nova,CEG-FYP-OpenStack/scheduler,DirectXMan12/nova-hacking,ruslanloman/nova,tealover/nova,salv-orlando/MyRepo,eneabio/nova,bigswitch/nova,BeyondTheClouds/nova,edulramirez/nova,j-carpentier/nova,joker946/nova,fajoy/nova,Triv90/Nova,sileht/deb-openstack-nova,cyx1231st/nova,iuliat/nova,sacharya/nova,NoBodyCam/TftpPxeBootBareMetal,JioCloud/nova_test_latest,raildo/nova,NoBodyCam/TftpPxeBootBareMetal,salv-orlando/MyRepo,citrix-openstack-build/nova,paulmathews/nova,j-carpentier/nova,sridevikoushik31/nova,gspilio/nova,aristanetworks/arista-ovs-nova,zhimin711/nova,josephsuh/extra-specs,fajoy/nova,usc-isi/nova,salv-orlando/MyRepo,klmitch/nova,spring-week-topos/nova-week,orbitfp7/nova,berrange/nova,KarimAllah/nova,imsplitbit/nova,akash1808/nova_test_latest,whitepages/nova,kimjaejoong/nova,saleemjaveds/https-github.com-openstack-nova,spring-week-topos/nova-week,tanglei528/nova,nikesh-mahalka/nova,Yusuke1987/openstack_template,noironetworks/nova,jeffrey4l/nova,devoid/nova,CCI-MOC/nova,ntt-sic/nova,petrutlucian94/nova_dev,dstroppa/openstack-smartos-nova-grizzly,Yuriy-Leonov/nova,fnordahl/nova,gooddata/openstack-nova,rajalokan/nova,yosshy/nova,rahulunair/nova,TieWei/nova,paulmathews/nova,phenoxim/nova,shahar-stratoscale/nova,dstroppa/openstack-smartos-nova-grizzly,yrobla/nova,klmitch/nova,openstack/nova,sileht/deb-openstack-nova,sridevikoushik31/openstack,plumgrid/plumgrid-nova,dawnpower/nova,KarimAllah/nova,adelina-t/nova,Triv90/Nova,sacharya/nova,ene
abio/nova,felixma/nova,Metaswitch/calico-nova,bclau/nova,Francis-Liu/animated-broccoli,rahulunair/nova,petrutlucian94/nova,usc-isi/extra-specs,savi-dev/nova,CiscoSystems/nova,felixma/nova,shail2810/nova,NewpTone/stacklab-nova,sileht/deb-openstack-nova,russellb/nova,cyx1231st/nova,tudorvio/nova,sridevikoushik31/nova,cernops/nova,BeyondTheClouds/nova,noironetworks/nova,maoy/zknova,sebrandon1/nova,savi-dev/nova,varunarya10/nova_test_latest,Juniper/nova,maelnor/nova,akash1808/nova,gooddata/openstack-nova,Juniper/nova,joker946/nova,eonpatapon/nova,dims/nova,LoHChina/nova,cloudbase/nova,yatinkumbhare/openstack-nova,plumgrid/plumgrid-nova,fnordahl/nova,dawnpower/nova,usc-isi/nova,KarimAllah/nova,tealover/nova,sridevikoushik31/nova,bgxavier/nova,JianyuWang/nova,akash1808/nova_test_latest,jianghuaw/nova,eayunstack/nova,alaski/nova,double12gzh/nova,watonyweng/nova,MountainWei/nova,eneabio/nova,belmiromoreira/nova,Juniper/nova,zzicewind/nova,DirectXMan12/nova-hacking,jeffrey4l/nova,ruslanloman/nova,kimjaejoong/nova,sridevikoushik31/openstack,Francis-Liu/animated-broccoli,devoid/nova,vmturbo/nova,leilihh/nova,rickerc/nova_audit,yatinkumbhare/openstack-nova,mandeepdhami/nova,TwinkleChawla/nova,petrutlucian94/nova_dev,zaina/nova,yrobla/nova,leilihh/novaha,rajalokan/nova,CEG-FYP-OpenStack/scheduler,bigswitch/nova,projectcalico/calico-nova,ted-gould/nova,mikalstill/nova,maheshp/novatest,barnsnake351/nova,raildo/nova,saleemjaveds/https-github.com-openstack-nova,mikalstill/nova,angdraug/nova,vladikr/nova_drafts,rrader/nova-docker-plugin,belmiromoreira/nova,redhat-openstack/nova,silenceli/nova,apporc/nova,dims/nova,OpenAcademy-OpenStack/nova-scheduler,mmnelemane/nova,sebrandon1/nova,sridevikoushik31/nova,JioCloud/nova,thomasem/nova,edulramirez/nova,cloudbase/nova-virtualbox,JioCloud/nova_test_latest,alaski/nova,mahak/nova,redhat-openstack/nova,cloudbau/nova,jianghuaw/nova,paulmathews/nova,shootstar/novatest,OpenAcademy-OpenStack/nova-scheduler,tanglei528/nova,rahulunair/nova,luogangyi
/bcec-nova,aristanetworks/arista-ovs-nova,jianghuaw/nova,takeshineshiro/nova,thomasem/nova,aristanetworks/arista-ovs-nova,Tehsmash/nova,hanlind/nova,alvarolopez/nova,double12gzh/nova,devendermishrajio/nova,maoy/zknova,klmitch/nova,leilihh/novaha,BeyondTheClouds/nova,blueboxgroup/nova,adelina-t/nova,bclau/nova,viggates/nova,takeshineshiro/nova
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta,
Column('id', Integer(), primary_key=True, nullable=False),
)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
Load table schema automatically instead of stubbing out
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta, autoload=True)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta,
Column('id', Integer(), primary_key=True, nullable=False),
)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
<commit_msg>Load table schema automatically instead of stubbing out<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta, autoload=True)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta,
Column('id', Integer(), primary_key=True, nullable=False),
)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
Load table schema automatically instead of stubbing out# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta, autoload=True)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta,
Column('id', Integer(), primary_key=True, nullable=False),
)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
<commit_msg>Load table schema automatically instead of stubbing out<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
instances = Table('instances', meta, autoload=True)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
meta.bind = migrate_engine
instances.drop_column('vm_mode')
|
332dbcb5eb0badccb843cdbdd167dcfa4b446352
|
rpmlint.conf.py
|
rpmlint.conf.py
|
from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
|
from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
# VIM: allow mixed space/tab usage in specific line.
addFilter('vim.spec:218: W: mixed-use-of-spaces-and-tabs')
|
Allow mixed-use-of-spaces-and-tabs in specific line of the spec file.
|
vim: Allow mixed-use-of-spaces-and-tabs in specific line of the spec file.
|
Python
|
apache-2.0
|
vrusinov/copr-sundry,google/copr-sundry,google/copr-sundry,vrusinov/copr-sundry
|
from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
vim: Allow mixed-use-of-spaces-and-tabs in specific line of the spec file.
|
from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
# VIM: allow mixed space/tab usage in specific line.
addFilter('vim.spec:218: W: mixed-use-of-spaces-and-tabs')
|
<commit_before>from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
<commit_msg>vim: Allow mixed-use-of-spaces-and-tabs in specific line of the spec file.<commit_after>
|
from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
# VIM: allow mixed space/tab usage in specific line.
addFilter('vim.spec:218: W: mixed-use-of-spaces-and-tabs')
|
from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
vim: Allow mixed-use-of-spaces-and-tabs in specific line of the spec file.from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
# VIM: allow mixed space/tab usage in specific line.
addFilter('vim.spec:218: W: mixed-use-of-spaces-and-tabs')
|
<commit_before>from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
<commit_msg>vim: Allow mixed-use-of-spaces-and-tabs in specific line of the spec file.<commit_after>from Config import *
# Not sure what this number does, but we need some threshold that we'd like to
# avoid crossing.
setOption("BadnessThreshold", 42)
# Ignore all lint warnings in submodules:
addFilter('third_party/submodules/')
# Ignore all lint warnings in symlinks from submodules.
addFilter('SPECS/cmake.spec')
addFilter('SPECS/gdb.spec')
addFilter('SPECS/gperftools.spec')
addFilter('SPECS/libcomps.spec')
addFilter('SPECS/nginx.spec')
addFilter('SPECS/perl.spec')
addFilter('perl.src')
addFilter('SPECS/python-iniparse.spec')
addFilter('SPECS/os-prober.spec')
addFilter('SPECS/yum.spec')
# Python is mostly third-party and has lots of warnings.
addFilter('SPECS/python.spec')
addFilter('SPECS/python3.spec')
addFilter('third_party/subtrees/python/python.spec')
addFilter('third_party/subtrees/python3/python3.spec')
# RPM is special, let's ignore warnings from it.
addFilter('SPECS/rpm.spec')
addFilter('third_party/subtrees/rpm/rpm.spec')
# DNF have a lot of weird stuff:
addFilter('dnf.spec.*libdir-macro-in-noarch-package')
# VIM: allow mixed space/tab usage in specific line.
addFilter('vim.spec:218: W: mixed-use-of-spaces-and-tabs')
|
fa54689fada175aa10ee1b096e73a0cd33aa702b
|
pmxbot/buffer.py
|
pmxbot/buffer.py
|
import logging
import irc.buffer
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
|
import logging
import irc.buffer
import irc.client
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
|
Correct AttributeError when client isn't explicitly imported.
|
Correct AttributeError when client isn't explicitly imported.
|
Python
|
mit
|
yougov/pmxbot,yougov/pmxbot,yougov/pmxbot
|
import logging
import irc.buffer
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
Correct AttributeError when client isn't explicitly imported.
|
import logging
import irc.buffer
import irc.client
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
|
<commit_before>import logging
import irc.buffer
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
<commit_msg>Correct AttributeError when client isn't explicitly imported.<commit_after>
|
import logging
import irc.buffer
import irc.client
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
|
import logging
import irc.buffer
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
Correct AttributeError when client isn't explicitly imported.import logging
import irc.buffer
import irc.client
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
|
<commit_before>import logging
import irc.buffer
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
<commit_msg>Correct AttributeError when client isn't explicitly imported.<commit_after>import logging
import irc.buffer
import irc.client
log = logging.getLogger(__name__)
class ErrorReportingBuffer(irc.buffer.LineBuffer):
encoding = 'utf-8'
def lines(self):
lines = super().lines()
for line in lines:
try:
yield line.decode(self.encoding)
except UnicodeDecodeError:
log.error("Unable to decode line: {line!r}".format(line=line))
@classmethod
def install(cls):
irc.client.ServerConnection.buffer_class = cls
|
01d95de1a2fc9bc7283f72e4225d49a5d65af15b
|
poyo/patterns.py
|
poyo/patterns.py
|
# -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
|
# -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LISTITEM = BLANK + r"-" + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LIST = SECTION + r"(?P<items>(?:(?P=indent)" + LISTITEM + r")+)"
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
|
Implement regex pattern for list values
|
Implement regex pattern for list values
|
Python
|
mit
|
hackebrot/poyo
|
# -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
Implement regex pattern for list values
|
# -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LISTITEM = BLANK + r"-" + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LIST = SECTION + r"(?P<items>(?:(?P=indent)" + LISTITEM + r")+)"
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
|
<commit_before># -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
<commit_msg>Implement regex pattern for list values<commit_after>
|
# -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LISTITEM = BLANK + r"-" + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LIST = SECTION + r"(?P<items>(?:(?P=indent)" + LISTITEM + r")+)"
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
|
# -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
Implement regex pattern for list values# -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LISTITEM = BLANK + r"-" + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LIST = SECTION + r"(?P<items>(?:(?P=indent)" + LISTITEM + r")+)"
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
|
<commit_before># -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
<commit_msg>Implement regex pattern for list values<commit_after># -*- coding: utf-8 -*-
INDENT = r"(?P<indent>^ *)"
VARIABLE = r"(?P<variable>.+):"
VALUE = r"(?P<value>(?:(?P<q2>['\"]).*?(?P=q2))|[^#]+?)"
NEWLINE = r"$\n"
BLANK = r" +"
INLINE_COMMENT = r"(?: +#.*)?"
COMMENT = r"^ *#.*" + NEWLINE
BLANK_LINE = r"^[ \t]*" + NEWLINE
DASHES = r"^---" + NEWLINE
SECTION = INDENT + VARIABLE + INLINE_COMMENT + NEWLINE
SIMPLE = INDENT + VARIABLE + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LISTITEM = BLANK + r"-" + BLANK + VALUE + INLINE_COMMENT + NEWLINE
LIST = SECTION + r"(?P<items>(?:(?P=indent)" + LISTITEM + r")+)"
NULL = r"\b(null|Null|NULL|~)\b"
TRUE = r"\b(true|True|TRUE)\b"
FALSE = r"\b(false|False|FALSE)\b"
INT = r"[-+]?[0-9]+"
FLOAT = r"([-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?)"
STR = r"(?P<quotes>['\"]?).*(?P=quotes)"
|
a4efdb71c2c067af52d871711632eba0c06dc811
|
django_extensions/jobs/daily/daily_cleanup.py
|
django_extensions/jobs/daily/daily_cleanup.py
|
"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Django Daily Cleanup Job"
def execute(self):
from django.core import management
try:
management.call_command('clearsessions')
except management.CommandError:
management.call_command("cleanup") # Django <1.5
|
"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Django Daily Cleanup Job"
def execute(self):
from django.core import management
from django import VERSION
if VERSION[:2] < (1, 5):
management.call_command("cleanup")
else:
management.call_command("clearsessions")
|
Use Django's VERSION to determine which cleanup command to call
|
Use Django's VERSION to determine which cleanup command to call
|
Python
|
mit
|
marctc/django-extensions,jpadilla/django-extensions,frewsxcv/django-extensions,gvangool/django-extensions,nikolas/django-extensions,haakenlid/django-extensions,ctrl-alt-d/django-extensions,django-extensions/django-extensions,helenst/django-extensions,helenst/django-extensions,github-account-because-they-want-it/django-extensions,levic/django-extensions,atchariya/django-extensions,artscoop/django-extensions,haakenlid/django-extensions,joeyespo/django-extensions,lamby/django-extensions,django-extensions/django-extensions,maroux/django-extensions,JoseTomasTocino/django-extensions,mandx/django-extensions,bionikspoon/django-extensions,bionikspoon/django-extensions,ewjoachim/django-extensions,nikolas/django-extensions,mandx/django-extensions,helenst/django-extensions,levic/django-extensions,barseghyanartur/django-extensions,barseghyanartur/django-extensions,barseghyanartur/django-extensions,rodo/django-extensions,dpetzold/django-extensions,maroux/django-extensions,joeyespo/django-extensions,gvangool/django-extensions,frewsxcv/django-extensions,github-account-because-they-want-it/django-extensions,marctc/django-extensions,marctc/django-extensions,jpadilla/django-extensions,linuxmaniac/django-extensions,ctrl-alt-d/django-extensions,t1m0thy/django-extensions,bionikspoon/django-extensions,zefciu/django-extensions,VishvajitP/django-extensions,t1m0thy/django-extensions,nikolas/django-extensions,haakenlid/django-extensions,VishvajitP/django-extensions,django-extensions/django-extensions,ewjoachim/django-extensions,jpadilla/django-extensions,ctrl-alt-d/django-extensions,lamby/django-extensions,kevgathuku/django-extensions,atchariya/django-extensions,JoseTomasTocino/django-extensions,linuxmaniac/django-extensions,dpetzold/django-extensions,dpetzold/django-extensions,VishvajitP/django-extensions,Moulde/django-extensions,artscoop/django-extensions,zefciu/django-extensions,kevgathuku/django-extensions,joeyespo/django-extensions,zefciu/django-extensions,mandx/django-extensions,lamby/d
jango-extensions,ewjoachim/django-extensions,levic/django-extensions,t1m0thy/django-extensions,frewsxcv/django-extensions,Moulde/django-extensions,Moulde/django-extensions,kevgathuku/django-extensions,github-account-because-they-want-it/django-extensions,linuxmaniac/django-extensions,rodo/django-extensions,artscoop/django-extensions,JoseTomasTocino/django-extensions,atchariya/django-extensions,rodo/django-extensions,gvangool/django-extensions,maroux/django-extensions
|
"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Django Daily Cleanup Job"
def execute(self):
from django.core import management
try:
management.call_command('clearsessions')
except management.CommandError:
management.call_command("cleanup") # Django <1.5
Use Django's VERSION to determine which cleanup command to call
|
"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
    """Daily maintenance job that purges expired data (sessions)."""

    help = "Django Daily Cleanup Job"

    def execute(self):
        # Imported lazily so this module can be loaded without a fully
        # configured Django environment.
        from django.core import management
        from django import VERSION

        if VERSION[:2] >= (1, 5):
            # Django 1.5 renamed the session-purge command.
            management.call_command("clearsessions")
        else:
            management.call_command("cleanup")
|
<commit_before>"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Django Daily Cleanup Job"
def execute(self):
from django.core import management
try:
management.call_command('clearsessions')
except management.CommandError:
management.call_command("cleanup") # Django <1.5
<commit_msg>Use Django's VERSION to determine which cleanup command to call<commit_after>
|
"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Django Daily Cleanup Job"
def execute(self):
from django.core import management
from django import VERSION
if VERSION[:2] < (1, 5):
management.call_command("cleanup")
else:
management.call_command("clearsessions")
|
"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Django Daily Cleanup Job"
def execute(self):
from django.core import management
try:
management.call_command('clearsessions')
except management.CommandError:
management.call_command("cleanup") # Django <1.5
Use Django's VERSION to determine which cleanup command to call"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Django Daily Cleanup Job"
def execute(self):
from django.core import management
from django import VERSION
if VERSION[:2] < (1, 5):
management.call_command("cleanup")
else:
management.call_command("clearsessions")
|
<commit_before>"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Django Daily Cleanup Job"
def execute(self):
from django.core import management
try:
management.call_command('clearsessions')
except management.CommandError:
management.call_command("cleanup") # Django <1.5
<commit_msg>Use Django's VERSION to determine which cleanup command to call<commit_after>"""
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Django Daily Cleanup Job"
def execute(self):
from django.core import management
from django import VERSION
if VERSION[:2] < (1, 5):
management.call_command("cleanup")
else:
management.call_command("clearsessions")
|
e820b807abb2b7b0b92bdaa4c07d3bcb49dbad3d
|
drivnal/remove_snapshot.py
|
drivnal/remove_snapshot.py
|
from constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
type = REMOVE_SNAPSHOT
def _pre_aborted(self):
self.snapshot.set_state(FAILED)
def run(self, keep_log=False):
self.snapshot.set_state(REMOVING)
if not keep_log:
logger.debug('Removing snapshot log file. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
if os.path.isfile(self.snapshot.log_path):
os.remove(self.snapshot.log_path)
logger.debug('Removing snapshot directory. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
args = ['rm', '-rf', self.snapshot.path]
self._exec(args)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
|
from constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
type = REMOVE_SNAPSHOT
def _pre_aborted(self):
self.snapshot.set_state(FAILED)
def run(self, keep_log=False):
self.snapshot.set_state(REMOVING)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
if not keep_log:
logger.debug('Removing snapshot log file. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
if os.path.isfile(self.snapshot.log_path):
os.remove(self.snapshot.log_path)
logger.debug('Removing snapshot directory. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
args = ['rm', '-rf', self.snapshot.path]
self._exec(args)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
|
Add snapshots updated event to remove snapshot
|
Add snapshots updated event to remove snapshot
|
Python
|
agpl-3.0
|
drivnal/drivnal,drivnal/drivnal,drivnal/drivnal
|
from constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
type = REMOVE_SNAPSHOT
def _pre_aborted(self):
self.snapshot.set_state(FAILED)
def run(self, keep_log=False):
self.snapshot.set_state(REMOVING)
if not keep_log:
logger.debug('Removing snapshot log file. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
if os.path.isfile(self.snapshot.log_path):
os.remove(self.snapshot.log_path)
logger.debug('Removing snapshot directory. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
args = ['rm', '-rf', self.snapshot.path]
self._exec(args)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
Add snapshots updated event to remove snapshot
|
from constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
    """Task that deletes a snapshot's log file and its on-disk directory."""

    type = REMOVE_SNAPSHOT

    def _pre_aborted(self):
        # Aborting before the removal completes leaves the snapshot FAILED.
        self.snapshot.set_state(FAILED)

    def run(self, keep_log=False):
        self.snapshot.set_state(REMOVING)
        # Fire an event right away so listeners see the REMOVING state.
        Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)

        log_info = {
            'volume_id': self.volume_id,
            'snapshot_id': self.snapshot_id,
            'task_id': self.id,
        }

        if not keep_log:
            logger.debug('Removing snapshot log file. %r' % log_info)
            if os.path.isfile(self.snapshot.log_path):
                os.remove(self.snapshot.log_path)

        logger.debug('Removing snapshot directory. %r' % log_info)
        # Delegate the recursive delete to the ExecTask machinery so it can
        # be monitored/aborted like any other subprocess.
        self._exec(['rm', '-rf', self.snapshot.path])

        Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
|
<commit_before>from constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
type = REMOVE_SNAPSHOT
def _pre_aborted(self):
self.snapshot.set_state(FAILED)
def run(self, keep_log=False):
self.snapshot.set_state(REMOVING)
if not keep_log:
logger.debug('Removing snapshot log file. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
if os.path.isfile(self.snapshot.log_path):
os.remove(self.snapshot.log_path)
logger.debug('Removing snapshot directory. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
args = ['rm', '-rf', self.snapshot.path]
self._exec(args)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
<commit_msg>Add snapshots updated event to remove snapshot<commit_after>
|
from constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
type = REMOVE_SNAPSHOT
def _pre_aborted(self):
self.snapshot.set_state(FAILED)
def run(self, keep_log=False):
self.snapshot.set_state(REMOVING)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
if not keep_log:
logger.debug('Removing snapshot log file. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
if os.path.isfile(self.snapshot.log_path):
os.remove(self.snapshot.log_path)
logger.debug('Removing snapshot directory. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
args = ['rm', '-rf', self.snapshot.path]
self._exec(args)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
|
from constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
type = REMOVE_SNAPSHOT
def _pre_aborted(self):
self.snapshot.set_state(FAILED)
def run(self, keep_log=False):
self.snapshot.set_state(REMOVING)
if not keep_log:
logger.debug('Removing snapshot log file. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
if os.path.isfile(self.snapshot.log_path):
os.remove(self.snapshot.log_path)
logger.debug('Removing snapshot directory. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
args = ['rm', '-rf', self.snapshot.path]
self._exec(args)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
Add snapshots updated event to remove snapshotfrom constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
type = REMOVE_SNAPSHOT
def _pre_aborted(self):
self.snapshot.set_state(FAILED)
def run(self, keep_log=False):
self.snapshot.set_state(REMOVING)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
if not keep_log:
logger.debug('Removing snapshot log file. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
if os.path.isfile(self.snapshot.log_path):
os.remove(self.snapshot.log_path)
logger.debug('Removing snapshot directory. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
args = ['rm', '-rf', self.snapshot.path]
self._exec(args)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
|
<commit_before>from constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
type = REMOVE_SNAPSHOT
def _pre_aborted(self):
self.snapshot.set_state(FAILED)
def run(self, keep_log=False):
self.snapshot.set_state(REMOVING)
if not keep_log:
logger.debug('Removing snapshot log file. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
if os.path.isfile(self.snapshot.log_path):
os.remove(self.snapshot.log_path)
logger.debug('Removing snapshot directory. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
args = ['rm', '-rf', self.snapshot.path]
self._exec(args)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
<commit_msg>Add snapshots updated event to remove snapshot<commit_after>from constants import *
from exec_task import ExecTask
from event import Event
import os
import time
import shlex
import logging
import shutil
import subprocess
logger = logging.getLogger(APP_NAME)
class RemoveSnapshot(ExecTask):
type = REMOVE_SNAPSHOT
def _pre_aborted(self):
self.snapshot.set_state(FAILED)
def run(self, keep_log=False):
self.snapshot.set_state(REMOVING)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
if not keep_log:
logger.debug('Removing snapshot log file. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
if os.path.isfile(self.snapshot.log_path):
os.remove(self.snapshot.log_path)
logger.debug('Removing snapshot directory. %r' % {
'volume_id': self.volume_id,
'snapshot_id': self.snapshot_id,
'task_id': self.id,
})
args = ['rm', '-rf', self.snapshot.path]
self._exec(args)
Event(volume_id=self.volume_id, type=SNAPSHOTS_UPDATED)
|
db1ded6aa53b41f8b6e90fb45236560d492eed47
|
addie/utilities/__init__.py
|
addie/utilities/__init__.py
|
import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
import os
from qtpy.uic import loadUi
import addie
addie_path = os.path.dirname(os.path.abspath(addie.__file__))
designer_path = os.path.join(addie_path, '../designer')
def load_ui(ui_filename, baseinstance):
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(designer_path, ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
Fix ui designer dir location
|
Fix ui designer dir location
|
Python
|
mit
|
neutrons/FastGR,neutrons/FastGR,neutrons/FastGR
|
import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
Fix ui designer dir location
|
import os
from qtpy.uic import loadUi
import addie
addie_path = os.path.dirname(os.path.abspath(addie.__file__))
designer_path = os.path.join(addie_path, '../designer')
def load_ui(ui_filename, baseinstance):
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(designer_path, ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
<commit_before>import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
<commit_msg>Fix ui designer dir location<commit_after>
|
import os
from qtpy.uic import loadUi
import addie
addie_path = os.path.dirname(os.path.abspath(addie.__file__))
designer_path = os.path.join(addie_path, '../designer')
def load_ui(ui_filename, baseinstance):
    """Load a Qt Designer .ui file onto *baseinstance*.

    Only the basename of *ui_filename* is used: every .ui file is assumed
    to live in the package-level ``designer_path`` directory.
    """
    basename = os.path.basename(ui_filename)
    full_path = os.path.join(designer_path, basename)
    return loadUi(full_path, baseinstance=baseinstance)
|
import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
Fix ui designer dir locationimport os
from qtpy.uic import loadUi
import addie
addie_path = os.path.dirname(os.path.abspath(addie.__file__))
designer_path = os.path.join(addie_path, '../designer')
def load_ui(ui_filename, baseinstance):
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(designer_path, ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
<commit_before>import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
<commit_msg>Fix ui designer dir location<commit_after>import os
from qtpy.uic import loadUi
import addie
addie_path = os.path.dirname(os.path.abspath(addie.__file__))
designer_path = os.path.join(addie_path, '../designer')
def load_ui(ui_filename, baseinstance):
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(designer_path, ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
4d970593c3dd068ee36a4ee3194c1d65f6a34d26
|
Heap/HeapSort.py
|
Heap/HeapSort.py
|
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.insert(0, min_node)
return ordered_array
|
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.append(min_node)
return ordered_array
|
Change order in ordered array
|
Change order in ordered array
|
Python
|
mit
|
andreweb/HeapHeap,andreeweb/HeapHeap
|
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.insert(0, min_node)
return ordered_array
Change order in ordered array
|
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.append(min_node)
return ordered_array
|
<commit_before>
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.insert(0, min_node)
return ordered_array
<commit_msg>Change order in ordered array<commit_after>
|
class HeapSort:
    """Heap-based sorting helper."""

    @classmethod
    def heapsort_support(cls, heap):
        """Drain *heap* with repeated delete_min and return the values.

        Values come out in ascending order (min first). Extraction stops
        early if delete_min ever yields None.
        """
        result = []
        while not heap.is_empty():
            node = heap.delete_min()
            if node is None:
                break
            result.append(node)
        return result
|
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.insert(0, min_node)
return ordered_array
Change order in ordered array
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.append(min_node)
return ordered_array
|
<commit_before>
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.insert(0, min_node)
return ordered_array
<commit_msg>Change order in ordered array<commit_after>
class HeapSort:
@classmethod
def heapsort_support(cls, heap):
# create support array
ordered_array = []
# extract min until heap is empty
while not heap.is_empty():
min_node = heap.delete_min()
if min_node is None:
break
else:
ordered_array.append(min_node)
return ordered_array
|
713da17448f7d6c23c8527c737b9c9c03dea5d80
|
adhocracy4/emails/mixins.py
|
adhocracy4/emails/mixins.py
|
from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
|
from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
    """
    Attaches the static file images/logo.png so it can be used in an html
    email.
    """
    def get_attachments(self):
        """Return the parent attachments plus the platform logo, if found.

        The logo is attached with Content-ID ``logo`` so HTML bodies can
        reference it as ``cid:logo``.
        """
        attachments = super().get_attachments()
        # Prefer the PNG logo; fall back to the SVG variant.
        filename = (
            finders.find('images/email_logo.png')
            or finders.find('images/email_logo.svg')
        )
        if filename:
            # Context manager ensures the file handle is closed promptly.
            with open(filename, 'rb') as f:
                logo = MIMEImage(f.read())
            logo.add_header('Content-ID', '<{}>'.format('logo'))
            return attachments + [logo]
        return attachments
class SyncEmailMixin(EmailBase):
    """Send Emails synchronously."""
    @classmethod
    def send(cls, object, *args, **kwargs):
        """Call dispatch immediately, in-process (no deferred/queued send)."""
        return cls().dispatch(object, *args, **kwargs)
|
Fix missing close for email logo file handle
|
Fix missing close for email logo file handle
|
Python
|
agpl-3.0
|
liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4
|
from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
Fix missing close for email logo file handle
|
from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
with open(filename, 'rb') as f:
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
|
<commit_before>from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
<commit_msg>Fix missing close for email logo file handle<commit_after>
|
from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
with open(filename, 'rb') as f:
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
|
from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
Fix missing close for email logo file handlefrom email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
with open(filename, 'rb') as f:
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
|
<commit_before>from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
<commit_msg>Fix missing close for email logo file handle<commit_after>from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
with open(filename, 'rb') as f:
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
|
bf853b3e215f1a9018007cb6efc5cc027c447a33
|
examples/books_collection/collection/forms.py
|
examples/books_collection/collection/forms.py
|
from flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
_instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self._instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self._instance.title
self.year.data = self._instance.year
def save(self):
if self._instance is None:
self._instance = self.document_class()
self._instance.title = self.title.data
self._instance.year = self.year.data
self._instance.save()
return self._instance
|
from flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self.instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self.instance.title
self.year.data = self.instance.year
def save(self):
if self.instance is None:
self.instance = self.document_class()
self.instance.title = self.title.data
self.instance.year = self.year.data
self.instance.save()
return self.instance
|
Document instance made acessible on form
|
Document instance made acessible on form
|
Python
|
bsd-2-clause
|
cobrateam/flask-mongoalchemy
|
from flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
_instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self._instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self._instance.title
self.year.data = self._instance.year
def save(self):
if self._instance is None:
self._instance = self.document_class()
self._instance.title = self.title.data
self._instance.year = self.year.data
self._instance.save()
return self._instance
Document instance made acessible on form
|
from flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self.instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self.instance.title
self.year.data = self.instance.year
def save(self):
if self.instance is None:
self.instance = self.document_class()
self.instance.title = self.title.data
self.instance.year = self.year.data
self.instance.save()
return self.instance
|
<commit_before>from flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
_instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self._instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self._instance.title
self.year.data = self._instance.year
def save(self):
if self._instance is None:
self._instance = self.document_class()
self._instance.title = self.title.data
self._instance.year = self.year.data
self._instance.save()
return self._instance
<commit_msg>Document instance made acessible on form<commit_after>
|
from flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self.instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self.instance.title
self.year.data = self.instance.year
def save(self):
if self.instance is None:
self.instance = self.document_class()
self.instance.title = self.title.data
self.instance.year = self.year.data
self.instance.save()
return self.instance
|
from flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
_instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self._instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self._instance.title
self.year.data = self._instance.year
def save(self):
if self._instance is None:
self._instance = self.document_class()
self._instance.title = self.title.data
self._instance.year = self.year.data
self._instance.save()
return self._instance
Document instance made acessible on formfrom flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self.instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self.instance.title
self.year.data = self.instance.year
def save(self):
if self.instance is None:
self.instance = self.document_class()
self.instance.title = self.title.data
self.instance.year = self.year.data
self.instance.save()
return self.instance
|
<commit_before>from flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
_instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self._instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self._instance.title
self.year.data = self._instance.year
def save(self):
if self._instance is None:
self._instance = self.document_class()
self._instance.title = self.title.data
self._instance.year = self.year.data
self._instance.save()
return self._instance
<commit_msg>Document instance made acessible on form<commit_after>from flaskext import wtf
from collection.documents import Book
class BookForm(wtf.Form):
document_class = Book
title = wtf.TextField(validators=[wtf.Required()])
year = wtf.IntegerField(validators=[wtf.Required()])
instance = None
def __init__(self, document=None, *args, **kwargs):
super(BookForm, self).__init__(*args, **kwargs)
if document is not None:
self.instance = document
self._copy_data_to_form()
def _copy_data_to_form(self):
self.title.data = self.instance.title
self.year.data = self.instance.year
def save(self):
if self.instance is None:
self.instance = self.document_class()
self.instance.title = self.title.data
self.instance.year = self.year.data
self.instance.save()
return self.instance
|
7da06023dd5d49314d255d36c9b74ac0a1ed4b6c
|
example/app/tests/managers.py
|
example/app/tests/managers.py
|
from django.core import management
from django.test import TestCase
from tx_salaries.factories import (OrganizationFactory, PostFactory,
MembershipFactory, EmployeeFactory)
class EvenEmployeeBreakdownMedianTest(TestCase):
def test_update_cohort(self):
parent_org = OrganizationFactory(name="Test Parent Organization")
department = OrganizationFactory(name="Test Organization",
parent=parent_org)
post = PostFactory(organization=department)
# POST MUST HAVE UNICODE VALUE
membership_one = MembershipFactory(post=post, organization=department,
person__gender='F')
membership_two = MembershipFactory(post=post, organization=department,
person__gender='F')
# create two employees
employee_one = EmployeeFactory(compensation=135000,
position=membership_one)
employee_two = EmployeeFactory(compensation=62217,
position=membership_two)
management.call_command('denormalize_salary_data')
# assert median salary of the organization is 98608.5
self.assertEqual(
float(department.stats.female['median_paid']), 98608.5)
# and the total number of female employees is 2
self.assertEqual(department.stats.female['total_number'], 2)
|
Test female breakdown median of organization with 2 employees
|
Test female breakdown median of organization with 2 employees
|
Python
|
apache-2.0
|
texastribune/tx_salaries,texastribune/tx_salaries
|
Test female breakdown median of organization with 2 employees
|
from django.core import management
from django.test import TestCase
from tx_salaries.factories import (OrganizationFactory, PostFactory,
MembershipFactory, EmployeeFactory)
class EvenEmployeeBreakdownMedianTest(TestCase):
def test_update_cohort(self):
parent_org = OrganizationFactory(name="Test Parent Organization")
department = OrganizationFactory(name="Test Organization",
parent=parent_org)
post = PostFactory(organization=department)
# POST MUST HAVE UNICODE VALUE
membership_one = MembershipFactory(post=post, organization=department,
person__gender='F')
membership_two = MembershipFactory(post=post, organization=department,
person__gender='F')
# create two employees
employee_one = EmployeeFactory(compensation=135000,
position=membership_one)
employee_two = EmployeeFactory(compensation=62217,
position=membership_two)
management.call_command('denormalize_salary_data')
# assert median salary of the organization is 98608.5
self.assertEqual(
float(department.stats.female['median_paid']), 98608.5)
# and the total number of female employees is 2
self.assertEqual(department.stats.female['total_number'], 2)
|
<commit_before><commit_msg>Test female breakdown median of organization with 2 employees<commit_after>
|
from django.core import management
from django.test import TestCase
from tx_salaries.factories import (OrganizationFactory, PostFactory,
MembershipFactory, EmployeeFactory)
class EvenEmployeeBreakdownMedianTest(TestCase):
def test_update_cohort(self):
parent_org = OrganizationFactory(name="Test Parent Organization")
department = OrganizationFactory(name="Test Organization",
parent=parent_org)
post = PostFactory(organization=department)
# POST MUST HAVE UNICODE VALUE
membership_one = MembershipFactory(post=post, organization=department,
person__gender='F')
membership_two = MembershipFactory(post=post, organization=department,
person__gender='F')
# create two employees
employee_one = EmployeeFactory(compensation=135000,
position=membership_one)
employee_two = EmployeeFactory(compensation=62217,
position=membership_two)
management.call_command('denormalize_salary_data')
# assert median salary of the organization is 98608.5
self.assertEqual(
float(department.stats.female['median_paid']), 98608.5)
# and the total number of female employees is 2
self.assertEqual(department.stats.female['total_number'], 2)
|
Test female breakdown median of organization with 2 employeesfrom django.core import management
from django.test import TestCase
from tx_salaries.factories import (OrganizationFactory, PostFactory,
MembershipFactory, EmployeeFactory)
class EvenEmployeeBreakdownMedianTest(TestCase):
def test_update_cohort(self):
parent_org = OrganizationFactory(name="Test Parent Organization")
department = OrganizationFactory(name="Test Organization",
parent=parent_org)
post = PostFactory(organization=department)
# POST MUST HAVE UNICODE VALUE
membership_one = MembershipFactory(post=post, organization=department,
person__gender='F')
membership_two = MembershipFactory(post=post, organization=department,
person__gender='F')
# create two employees
employee_one = EmployeeFactory(compensation=135000,
position=membership_one)
employee_two = EmployeeFactory(compensation=62217,
position=membership_two)
management.call_command('denormalize_salary_data')
# assert median salary of the organization is 98608.5
self.assertEqual(
float(department.stats.female['median_paid']), 98608.5)
# and the total number of female employees is 2
self.assertEqual(department.stats.female['total_number'], 2)
|
<commit_before><commit_msg>Test female breakdown median of organization with 2 employees<commit_after>from django.core import management
from django.test import TestCase
from tx_salaries.factories import (OrganizationFactory, PostFactory,
MembershipFactory, EmployeeFactory)
class EvenEmployeeBreakdownMedianTest(TestCase):
def test_update_cohort(self):
parent_org = OrganizationFactory(name="Test Parent Organization")
department = OrganizationFactory(name="Test Organization",
parent=parent_org)
post = PostFactory(organization=department)
# POST MUST HAVE UNICODE VALUE
membership_one = MembershipFactory(post=post, organization=department,
person__gender='F')
membership_two = MembershipFactory(post=post, organization=department,
person__gender='F')
# create two employees
employee_one = EmployeeFactory(compensation=135000,
position=membership_one)
employee_two = EmployeeFactory(compensation=62217,
position=membership_two)
management.call_command('denormalize_salary_data')
# assert median salary of the organization is 98608.5
self.assertEqual(
float(department.stats.female['median_paid']), 98608.5)
# and the total number of female employees is 2
self.assertEqual(department.stats.female['total_number'], 2)
|
|
9608e32ded51ce87e890fd880044f252c6574ea5
|
examples/aiohttp_server.py
|
examples/aiohttp_server.py
|
from aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
|
from aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response, status=response.http_status)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
|
Return http status in aiohttp example
|
Return http status in aiohttp example
|
Python
|
mit
|
bcb/jsonrpcserver
|
from aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
Return http status in aiohttp example
|
from aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response, status=response.http_status)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
|
<commit_before>from aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
<commit_msg>Return http status in aiohttp example<commit_after>
|
from aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response, status=response.http_status)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
|
from aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
Return http status in aiohttp examplefrom aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response, status=response.http_status)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
|
<commit_before>from aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
<commit_msg>Return http status in aiohttp example<commit_after>from aiohttp import web
from jsonrpcserver.aio import methods
@methods.add
async def ping():
return 'pong'
async def handle(request):
request = await request.text()
response = await methods.dispatch(request)
if response.is_notification:
return web.Response()
else:
return web.json_response(response, status=response.http_status)
app = web.Application()
app.router.add_post('/', handle)
if __name__ == '__main__':
web.run_app(app, port=5000)
|
0fa1370d7ee04f373f11b844295f1706686a0cc5
|
account_bank_reconciliation_summary_xlsx/__manifest__.py
|
account_bank_reconciliation_summary_xlsx/__manifest__.py
|
# -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Account Bank Statement Reconciliation Summary',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank statement reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
|
# -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Bank Reconciliation Report',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
|
Update module name and summary
|
Update module name and summary
|
Python
|
agpl-3.0
|
OCA/account-financial-reporting,OCA/account-financial-reporting,OCA/account-financial-reporting
|
# -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Account Bank Statement Reconciliation Summary',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank statement reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
Update module name and summary
|
# -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Bank Reconciliation Report',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Account Bank Statement Reconciliation Summary',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank statement reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
<commit_msg>Update module name and summary<commit_after>
|
# -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Bank Reconciliation Report',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
|
# -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Account Bank Statement Reconciliation Summary',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank statement reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
Update module name and summary# -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Bank Reconciliation Report',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Account Bank Statement Reconciliation Summary',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank statement reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
<commit_msg>Update module name and summary<commit_after># -*- coding: utf-8 -*-
# © 2017 Akretion (Alexis de Lattre <alexis.delattre@akretion.com>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Bank Reconciliation Report',
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'http://www.akretion.com',
'summary': 'Adds an XLSX report to help on bank reconciliation',
'depends': ['account', 'report_xlsx'],
'data': [
'report/report.xml',
'views/account_bank_statement_view.xml',
],
'installable': True,
}
|
a47242fcd93ca3e2eb37f351747847867ef34588
|
jacquard/service/commands.py
|
jacquard/service/commands.py
|
"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
|
"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
plumbing = True
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
|
Mark runserver as a plumbing command
|
Mark runserver as a plumbing command
|
Python
|
mit
|
prophile/jacquard,prophile/jacquard
|
"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
Mark runserver as a plumbing command
|
"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
plumbing = True
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
|
<commit_before>"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
<commit_msg>Mark runserver as a plumbing command<commit_after>
|
"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
plumbing = True
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
|
"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
Mark runserver as a plumbing command"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
plumbing = True
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
|
<commit_before>"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
<commit_msg>Mark runserver as a plumbing command<commit_after>"""Command-line utilities for HTTP service subsystem."""
import werkzeug.debug
import werkzeug.serving
from jacquard.commands import BaseCommand
from jacquard.service import get_wsgi_app
class RunServer(BaseCommand):
"""
Run a debug server.
**This is for debug, local use only, not production.**
This command is named to mirror its equivalent in Django. It configures
the WSGI app and serves it through Werkzeug's simple serving mechanism,
with a debugger attached, and auto-reloading.
"""
plumbing = True
help = "run a (local, debug) server"
def add_arguments(self, parser):
"""Add argparse arguments."""
parser.add_argument(
'-p',
'--port',
type=int,
default=1212,
help="port to bind to",
)
parser.add_argument(
'-b',
'--bind',
type=str,
default='::1',
help="address to bind to",
)
def handle(self, config, options):
"""Run command."""
app = get_wsgi_app(config)
werkzeug.serving.run_simple(
options.bind,
options.port,
app,
use_reloader=True,
use_debugger=True,
use_evalex=True,
threaded=False,
processes=1,
)
|
6ee60074bfcf9fcd0e1b1f36b4c0324f41532d6d
|
integrationtests/mayavi/test_mlab_envisage.py
|
integrationtests/mayavi/test_mlab_envisage.py
|
from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
|
from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
# Hack: on Linux the splash screen does not go away so we force it.
GUI.invoke_after(500, e.window.workbench.application.gui.stop_event_loop)
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
|
Stop event loop after test completes.
|
BUG: Stop event loop after test completes.
|
Python
|
bsd-3-clause
|
alexandreleroux/mayavi,dmsurti/mayavi,alexandreleroux/mayavi,liulion/mayavi,dmsurti/mayavi,liulion/mayavi
|
from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
BUG: Stop event loop after test completes.
|
from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
# Hack: on Linux the splash screen does not go away so we force it.
GUI.invoke_after(500, e.window.workbench.application.gui.stop_event_loop)
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
|
<commit_before>from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
<commit_msg>BUG: Stop event loop after test completes.<commit_after>
|
from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
# Hack: on Linux the splash screen does not go away so we force it.
GUI.invoke_after(500, e.window.workbench.application.gui.stop_event_loop)
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
|
from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
BUG: Stop event loop after test completes.from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
# Hack: on Linux the splash screen does not go away so we force it.
GUI.invoke_after(500, e.window.workbench.application.gui.stop_event_loop)
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
|
<commit_before>from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
<commit_msg>BUG: Stop event loop after test completes.<commit_after>from mayavi import mlab
from pyface.api import GUI
def close():
"""Close the scene."""
f = mlab.gcf()
e = mlab.get_engine()
e.window.workbench.prompt_on_exit = False
e.window.close()
# Hack: on Linux the splash screen does not go away so we force it.
GUI.invoke_after(500, e.window.workbench.application.gui.stop_event_loop)
def test_mlab_envisage():
"""Test if mlab runs correctly when the backend is set to
'envisage'."""
@mlab.show
def f():
mlab.options.backend = 'envisage'
mlab.test_contour3d()
GUI.invoke_after(3000, close)
f()
if __name__ == '__main__':
test_mlab_envisage()
|
ff2d82744d1dc268a901c7d3458463bf04c1a6e8
|
saleor/plugins/migrations/0004_drop_support_for_env_vatlayer_access_key.py
|
saleor/plugins/migrations/0004_drop_support_for_env_vatlayer_access_key.py
|
from django.db import migrations
def assign_access_key(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(assign_access_key),
]
|
from django.db import migrations
def deactivate_vatlayer(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(deactivate_vatlayer),
]
|
Change migration name to more proper
|
Change migration name to more proper
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
from django.db import migrations
def assign_access_key(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(assign_access_key),
]
Change migration name to more proper
|
from django.db import migrations
def deactivate_vatlayer(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(deactivate_vatlayer),
]
|
<commit_before>from django.db import migrations
def assign_access_key(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(assign_access_key),
]
<commit_msg>Change migration name to more proper<commit_after>
|
from django.db import migrations
def deactivate_vatlayer(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(deactivate_vatlayer),
]
|
from django.db import migrations
def assign_access_key(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(assign_access_key),
]
Change migration name to more properfrom django.db import migrations
def deactivate_vatlayer(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(deactivate_vatlayer),
]
|
<commit_before>from django.db import migrations
def assign_access_key(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(assign_access_key),
]
<commit_msg>Change migration name to more proper<commit_after>from django.db import migrations
def deactivate_vatlayer(apps, schema):
vatlayer_configuration = (
apps.get_model("plugins", "PluginConfiguration")
.objects.filter(identifier="mirumee.taxes.vatlayer")
.first()
)
if vatlayer_configuration:
vatlayer_configuration.active = False
vatlayer_configuration.save()
class Migration(migrations.Migration):
dependencies = [
("plugins", "0003_auto_20200429_0142"),
]
operations = [
migrations.RunPython(deactivate_vatlayer),
]
|
ec30e6355fd9ea9cc22217ece7c9dab2640f6786
|
filmfest/settings/pytest.py
|
filmfest/settings/pytest.py
|
from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
|
from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.db',
}
}
|
Use db search backend instead of elastic in tests
|
Use db search backend instead of elastic in tests
|
Python
|
unlicense
|
nott/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,nott/next.filmfest.by,nott/next.filmfest.by,nott/next.filmfest.by
|
from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
Use db search backend instead of elastic in tests
|
from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.db',
}
}
|
<commit_before>from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
<commit_msg>Use db search backend instead of elastic in tests<commit_after>
|
from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.db',
}
}
|
from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
Use db search backend instead of elastic in testsfrom .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.db',
}
}
|
<commit_before>from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
<commit_msg>Use db search backend instead of elastic in tests<commit_after>from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test_db.sqlite3'),
}
}
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail.wagtailsearch.backends.db',
}
}
|
0fbd7c2f68f9f751642fd0e618dfcb6726d79f44
|
fireplace/cards/wog/mage.py
|
fireplace/cards/wog/mage.py
|
from ..utils import *
##
# Minions
|
from ..utils import *
##
# Minions
class OG_083:
"Twilight Flamecaller"
play = Hit(ENEMY_MINIONS, 1)
class OG_120:
"Anomalus"
deathrattle = Hit(ALL_MINIONS, 8)
class OG_207:
"Faceless Summoner"
play = Summon(CONTROLLER, RandomMinion(cost=3))
|
Implement Twilight Flamecaller, Anomalus, Faceless Summoner
|
Implement Twilight Flamecaller, Anomalus, Faceless Summoner
|
Python
|
agpl-3.0
|
NightKev/fireplace,jleclanche/fireplace,beheh/fireplace
|
from ..utils import *
##
# Minions
Implement Twilight Flamecaller, Anomalus, Faceless Summoner
|
from ..utils import *
##
# Minions
class OG_083:
"Twilight Flamecaller"
play = Hit(ENEMY_MINIONS, 1)
class OG_120:
"Anomalus"
deathrattle = Hit(ALL_MINIONS, 8)
class OG_207:
"Faceless Summoner"
play = Summon(CONTROLLER, RandomMinion(cost=3))
|
<commit_before>from ..utils import *
##
# Minions
<commit_msg>Implement Twilight Flamecaller, Anomalus, Faceless Summoner<commit_after>
|
from ..utils import *
##
# Minions
class OG_083:
"Twilight Flamecaller"
play = Hit(ENEMY_MINIONS, 1)
class OG_120:
"Anomalus"
deathrattle = Hit(ALL_MINIONS, 8)
class OG_207:
"Faceless Summoner"
play = Summon(CONTROLLER, RandomMinion(cost=3))
|
from ..utils import *
##
# Minions
Implement Twilight Flamecaller, Anomalus, Faceless Summonerfrom ..utils import *
##
# Minions
class OG_083:
"Twilight Flamecaller"
play = Hit(ENEMY_MINIONS, 1)
class OG_120:
"Anomalus"
deathrattle = Hit(ALL_MINIONS, 8)
class OG_207:
"Faceless Summoner"
play = Summon(CONTROLLER, RandomMinion(cost=3))
|
<commit_before>from ..utils import *
##
# Minions
<commit_msg>Implement Twilight Flamecaller, Anomalus, Faceless Summoner<commit_after>from ..utils import *
##
# Minions
class OG_083:
"Twilight Flamecaller"
play = Hit(ENEMY_MINIONS, 1)
class OG_120:
"Anomalus"
deathrattle = Hit(ALL_MINIONS, 8)
class OG_207:
"Faceless Summoner"
play = Summon(CONTROLLER, RandomMinion(cost=3))
|
74f386e68b598d20301fb365b67bfe897a94b986
|
ifbAMPdb/searchEngine/urls.py
|
ifbAMPdb/searchEngine/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z1-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z0-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
|
Fix regex of peptide search.
|
Fix regex of peptide search.
Fixed peptide Regex link matcher.
|
Python
|
agpl-3.0
|
auyer/IFB_ampmDb,auyer/IFB_ampmDb
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z1-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
Fix regex of peptide search.
Fixed peptide Regex link matcher.
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z0-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z1-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
<commit_msg>Fix regex of peptide search.
Fixed peptide Regex link matcher.<commit_after>
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z0-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z1-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
Fix regex of peptide search.
Fixed peptide Regex link matcher.from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z0-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z1-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
<commit_msg>Fix regex of peptide search.
Fixed peptide Regex link matcher.<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^search/peptide/(?P<pk>[A-Za-z0-9.:]+)/$', views.detail, name='PeptideDetails'),
url(r'^search/advanced/$', views.advSearch, name='AdvancedSearch'),
url(r'^search/results/basic/', views.ampBasicSearch, name='SearchResults'),
url(r'^search/results/advanced/', views.advSearchResults, name='AdvancedResults'),
]
|
ba0f726ff1a777adc028110dfa94524399adb4ab
|
imager/ImagerProfile/admin.py
|
imager/ImagerProfile/admin.py
|
from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# admin.site.unregister(User)
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
# def get_inline_formsets(self, request, formsets, inline_instances,
# obj=None):
# if obj is None:
# inline_instances.remove(ImagerUserInline)
# else:
# inline_instances.add(ImagerUserInline)
# super(UserAdmin, self).get_inline_formsets(request, formsets,
# inline_instances, obj)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
def get_inline_instances(self, request, obj=None):
if obj is None:
return []
else:
return [inline(self.model, self.admin_site)
for inline in self.inlines]
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
Remove Imager Profile form when creating new user, only displays when editing existing user to prevent Integrity Error
|
Remove Imager Profile form when creating new user, only displays when editing existing user to prevent Integrity Error
|
Python
|
mit
|
nbeck90/django-imager,nbeck90/django-imager
|
from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# admin.site.unregister(User)
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
# def get_inline_formsets(self, request, formsets, inline_instances,
# obj=None):
# if obj is None:
# inline_instances.remove(ImagerUserInline)
# else:
# inline_instances.add(ImagerUserInline)
# super(UserAdmin, self).get_inline_formsets(request, formsets,
# inline_instances, obj)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
Remove Imager Profile form when creating new user, only displays when editing existing user to prevent Integrity Error
|
from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
def get_inline_instances(self, request, obj=None):
if obj is None:
return []
else:
return [inline(self.model, self.admin_site)
for inline in self.inlines]
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
<commit_before>from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# admin.site.unregister(User)
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
# def get_inline_formsets(self, request, formsets, inline_instances,
# obj=None):
# if obj is None:
# inline_instances.remove(ImagerUserInline)
# else:
# inline_instances.add(ImagerUserInline)
# super(UserAdmin, self).get_inline_formsets(request, formsets,
# inline_instances, obj)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
<commit_msg>Remove Imager Profile form when creating new user, only displays when editing existing user to prevent Integrity Error<commit_after>
|
from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
def get_inline_instances(self, request, obj=None):
if obj is None:
return []
else:
return [inline(self.model, self.admin_site)
for inline in self.inlines]
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# admin.site.unregister(User)
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
# def get_inline_formsets(self, request, formsets, inline_instances,
# obj=None):
# if obj is None:
# inline_instances.remove(ImagerUserInline)
# else:
# inline_instances.add(ImagerUserInline)
# super(UserAdmin, self).get_inline_formsets(request, formsets,
# inline_instances, obj)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
Remove Imager Profile form when creating new user, only displays when editing existing user to prevent Integrity Errorfrom django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
def get_inline_instances(self, request, obj=None):
if obj is None:
return []
else:
return [inline(self.model, self.admin_site)
for inline in self.inlines]
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
<commit_before>from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# admin.site.unregister(User)
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
# def get_inline_formsets(self, request, formsets, inline_instances,
# obj=None):
# if obj is None:
# inline_instances.remove(ImagerUserInline)
# else:
# inline_instances.add(ImagerUserInline)
# super(UserAdmin, self).get_inline_formsets(request, formsets,
# inline_instances, obj)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
<commit_msg>Remove Imager Profile form when creating new user, only displays when editing existing user to prevent Integrity Error<commit_after>from django.contrib import admin
from imagerprofile.models import ImagerProfile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
class ImagerUserInline(admin.StackedInline):
model = ImagerProfile
can_delete = False
verbose_name_plural = 'imager user'
extra = 0
class UserAdmin(UserAdmin):
inlines = (ImagerUserInline, )
def get_inline_instances(self, request, obj=None):
if obj is None:
return []
else:
return [inline(self.model, self.admin_site)
for inline in self.inlines]
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
92d9e9885e241e0bb7df64d3cd696db09cdfc74d
|
utils.py
|
utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)
if colname not in ['pedido_data', 'cliente_data']}
for row in df.values]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
Remove columns filter on pandas_to_dict
|
Remove columns filter on pandas_to_dict
|
Python
|
mit
|
mlgruby/mining,mlgruby/mining,mining/mining,chrisdamba/mining,jgabriellima/mining,chrisdamba/mining,AndrzejR/mining,seagoat/mining,jgabriellima/mining,mining/mining,avelino/mining,AndrzejR/mining,seagoat/mining,mlgruby/mining,avelino/mining
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)
if colname not in ['pedido_data', 'cliente_data']}
for row in df.values]
Remove columns filter on pandas_to_dict
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)
if colname not in ['pedido_data', 'cliente_data']}
for row in df.values]
<commit_msg>Remove columns filter on pandas_to_dict<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)
if colname not in ['pedido_data', 'cliente_data']}
for row in df.values]
Remove columns filter on pandas_to_dict#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)
if colname not in ['pedido_data', 'cliente_data']}
for row in df.values]
<commit_msg>Remove columns filter on pandas_to_dict<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
e39fadc6fead884fc3457fa7629fc8d1c72f5240
|
views.py
|
views.py
|
from django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2)
return HttpResponse(content=details_json)
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
trace_json = json.dumps(trace, indent=2)
return HttpResponse(content=trace_json, mimetype="application/json")
return HttpResponse(status=404)
|
from django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from mixcloud.utils.decorators import json_response
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2) # Cannot use decorator as need default=repr
return HttpResponse(content=details_json, mimetype='text/javascript; charset=utf-8')
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
@json_response
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
return trace
return HttpResponse(status=404)
|
Use json decorator where appropriate
|
Use json decorator where appropriate
|
Python
|
mit
|
theospears/django-speedbar,mixcloud/django-speedbar,mixcloud/django-speedbar,theospears/django-speedbar,mixcloud/django-speedbar,theospears/django-speedbar
|
from django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2)
return HttpResponse(content=details_json)
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
trace_json = json.dumps(trace, indent=2)
return HttpResponse(content=trace_json, mimetype="application/json")
return HttpResponse(status=404)
Use json decorator where appropriate
|
from django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from mixcloud.utils.decorators import json_response
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2) # Cannot use decorator as need default=repr
return HttpResponse(content=details_json, mimetype='text/javascript; charset=utf-8')
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
@json_response
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
return trace
return HttpResponse(status=404)
|
<commit_before>from django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2)
return HttpResponse(content=details_json)
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
trace_json = json.dumps(trace, indent=2)
return HttpResponse(content=trace_json, mimetype="application/json")
return HttpResponse(status=404)
<commit_msg>Use json decorator where appropriate<commit_after>
|
from django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from mixcloud.utils.decorators import json_response
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2) # Cannot use decorator as need default=repr
return HttpResponse(content=details_json, mimetype='text/javascript; charset=utf-8')
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
@json_response
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
return trace
return HttpResponse(status=404)
|
from django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2)
return HttpResponse(content=details_json)
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
trace_json = json.dumps(trace, indent=2)
return HttpResponse(content=trace_json, mimetype="application/json")
return HttpResponse(status=404)
Use json decorator where appropriatefrom django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from mixcloud.utils.decorators import json_response
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2) # Cannot use decorator as need default=repr
return HttpResponse(content=details_json, mimetype='text/javascript; charset=utf-8')
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
@json_response
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
return trace
return HttpResponse(status=404)
|
<commit_before>from django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2)
return HttpResponse(content=details_json)
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
trace_json = json.dumps(trace, indent=2)
return HttpResponse(content=trace_json, mimetype="application/json")
return HttpResponse(status=404)
<commit_msg>Use json decorator where appropriate<commit_after>from django.http import HttpResponse
from django.core.cache import cache
from mixcloud.utils.decorators import staff_only
from mixcloud.speedbar.utils import DETAILS_PREFIX, TRACE_PREFIX
from mixcloud.utils.decorators import json_response
from gargoyle.decorators import switch_is_active
import json
@staff_only
@switch_is_active('speedbar:panel')
def panel(request, trace_id):
details = cache.get(DETAILS_PREFIX + trace_id)
if details:
details_json = json.dumps(details, skipkeys=True, default=repr, indent=2) # Cannot use decorator as need default=repr
return HttpResponse(content=details_json, mimetype='text/javascript; charset=utf-8')
return HttpResponse(status=404)
@staff_only
@switch_is_active('speedbar:trace')
@json_response
def trace(request, trace_id):
trace = cache.get(TRACE_PREFIX + trace_id)
if trace:
return trace
return HttpResponse(status=404)
|
64671712fb465a9e940484a5f2f4b8d673aaee75
|
words.py
|
words.py
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
Enforce lowercase on word selection
|
Enforce lowercase on word selection
|
Python
|
mit
|
andrewyang96/HangmanGame
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
Enforce lowercase on word selection
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
<commit_before>"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
<commit_msg>Enforce lowercase on word selection<commit_after>
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
Enforce lowercase on word selection"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
<commit_before>"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
<commit_msg>Enforce lowercase on word selection<commit_after>"""Function to fetch words."""
import random
WORDLIST = 'wordlist.txt'
def get_random_word(min_word_length):
"""Get a random word from the wordlist using no extra memory."""
num_words_processed = 0
curr_word = None
with open(WORDLIST, 'r') as f:
for word in f:
word = word.strip().lower()
if len(word) < min_word_length:
continue
num_words_processed += 1
if random.randint(1, num_words_processed) == 1:
curr_word = word
return curr_word
|
2f98576b4b4fe5ec55c4125e2b9105dbef4e5900
|
hedonist/config.py
|
hedonist/config.py
|
"""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.1,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.05,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 10000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
|
"""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DoubleDeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.01,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.001,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 30000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
|
Switch to Double DQN as the default algorithm.
|
Switch to Double DQN as the default algorithm.
|
Python
|
mit
|
nerdoid/hedonist
|
"""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.1,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.05,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 10000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
Switch to Double DQN as the default algorithm.
|
"""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DoubleDeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.01,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.001,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 30000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
|
<commit_before>"""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.1,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.05,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 10000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
<commit_msg>Switch to Double DQN as the default algorithm.<commit_after>
|
"""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DoubleDeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.01,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.001,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 30000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
|
"""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.1,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.05,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 10000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
Switch to Double DQN as the default algorithm."""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DoubleDeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.01,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.001,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 30000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
|
<commit_before>"""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.1,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.05,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 10000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
<commit_msg>Switch to Double DQN as the default algorithm.<commit_after>"""Configuration data for training or evaluating a reinforcement learning
agent.
"""
import agents
def get_config():
config = {
'game': 'BreakoutDeterministic-v3',
'agent_type': agents.DoubleDeepQLearner,
'history_length': 4,
'training_steps': 50000000,
'training_freq': 4,
'num_eval_episodes': 30,
'max_steps_per_eval_episode': 135000,
'eval_freq': 150000,
'initial_replay_size': 50000,
'epsilon_start': 1.0,
'epsilon_end': 0.01,
'epsilon_decay_steps': 1000000,
'eval_epsilon': 0.001,
'screen_dims': (84, 84),
'reward_processing': 'clip',
'discount_factor': 0.99,
'learning_rate': 0.00025,
'rms_scale': 0.95,
'rms_constant': 0.01,
'error_clipping': 1.0,
'target_update_freq': 30000,
'memory_capacity': 1000000,
'batch_size': 32,
'summary_freq': 50000,
}
return config
|
9ae4ebf7e95cb301321911886cbb4041fae1eff6
|
bookmarks/search_indexes.py
|
bookmarks/search_indexes.py
|
from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
|
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
|
Python
|
mit
|
incuna/incuna-bookmarks,incuna/incuna-bookmarks
|
from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.
|
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
<commit_before>from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
<commit_msg>Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.<commit_after>
|
from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
<commit_before>from haystack.indexes import CharField, DateTimeField, RealTimeSearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(RealTimeSearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
<commit_msg>Use `SearchIndex` instead of deprecated `RealTimeSearchIndex`.<commit_after>from haystack.indexes import CharField, DateTimeField, SearchIndex, Indexable
from models import Bookmark
class BookmarkIndex(SearchIndex, Indexable):
text = CharField(document=True, use_template=True)
title = CharField(model_attr='description')
author = CharField(model_attr='adder')
pub_date = DateTimeField(model_attr='added')
summary = CharField(model_attr='note')
sites = CharField(model_attr='site_slugs')
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().objects.all()
def get_model(self):
return Bookmark
|
8efd4b8661f5be47c04130de6d47c8b80c39454c
|
selvbetjening/core/events/management/commands/recalculate_attend_columns.py
|
selvbetjening/core/events/management/commands/recalculate_attend_columns.py
|
from django.core.management.base import NoArgsCommand
from selvbetjening.core.events.models import Attend
class Command(NoArgsCommand):
def handle_noargs(self, **options):
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
|
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
def handle_noargs(self, **options):
from selvbetjening.core.events.models import Attend
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
|
Fix import that crashed the system under certain conditions
|
Fix import that crashed the system under certain conditions
|
Python
|
mit
|
animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening
|
from django.core.management.base import NoArgsCommand
from selvbetjening.core.events.models import Attend
class Command(NoArgsCommand):
def handle_noargs(self, **options):
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
Fix import that crashed the system under certain conditions
|
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
def handle_noargs(self, **options):
from selvbetjening.core.events.models import Attend
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
|
<commit_before>
from django.core.management.base import NoArgsCommand
from selvbetjening.core.events.models import Attend
class Command(NoArgsCommand):
def handle_noargs(self, **options):
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
<commit_msg>Fix import that crashed the system under certain conditions<commit_after>
|
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
def handle_noargs(self, **options):
from selvbetjening.core.events.models import Attend
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
|
from django.core.management.base import NoArgsCommand
from selvbetjening.core.events.models import Attend
class Command(NoArgsCommand):
def handle_noargs(self, **options):
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
Fix import that crashed the system under certain conditions
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
def handle_noargs(self, **options):
from selvbetjening.core.events.models import Attend
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
|
<commit_before>
from django.core.management.base import NoArgsCommand
from selvbetjening.core.events.models import Attend
class Command(NoArgsCommand):
def handle_noargs(self, **options):
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
<commit_msg>Fix import that crashed the system under certain conditions<commit_after>
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
def handle_noargs(self, **options):
from selvbetjening.core.events.models import Attend
attendees = Attend.objects.select_related().prefetch_related('selection_set')
for attendee in attendees:
attendee.recalculate_price()
Attend.objects.recalculate_aggregations_paid(attendees)
|
e4dd3e6c260ab446ca15b203dd5628f3b300887e
|
submit_awcy.py
|
submit_awcy.py
|
#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=os.getlogin())
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
|
#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
branch = subprocess.check_output('git symbolic-ref -q --short HEAD',shell=True).strip()
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=branch)
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
|
Use branch name instead of username
|
Use branch name instead of username
|
Python
|
mit
|
mdinger/awcy,mdinger/awcy,tdaede/awcy,mdinger/awcy,tdaede/awcy,mdinger/awcy,tdaede/awcy,tdaede/awcy,tdaede/awcy,mdinger/awcy,tdaede/awcy
|
#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=os.getlogin())
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
Use branch name instead of username
|
#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
branch = subprocess.check_output('git symbolic-ref -q --short HEAD',shell=True).strip()
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=branch)
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=os.getlogin())
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
<commit_msg>Use branch name instead of username<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
branch = subprocess.check_output('git symbolic-ref -q --short HEAD',shell=True).strip()
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=branch)
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
|
#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=os.getlogin())
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
Use branch name instead of username#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
branch = subprocess.check_output('git symbolic-ref -q --short HEAD',shell=True).strip()
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=branch)
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=os.getlogin())
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
<commit_msg>Use branch name instead of username<commit_after>#!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
if 'DAALA_ROOT' not in os.environ:
print("Please specify the DAALA_ROOT environment variable to use this tool.")
sys.exit(1)
keyfile = open('secret_key','r')
key = keyfile.read().strip()
daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)
branch = subprocess.check_output('git symbolic-ref -q --short HEAD',shell=True).strip()
parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=branch)
args = parser.parse_args()
commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix
run_id = user+'-'+date_short+'-'+short
print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
|
95ef52b3c80d6f639ddd988ecd209057250fef1b
|
tags/fields.py
|
tags/fields.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^tags\.fields\.TagField"])
except:
pass
|
Make the TagField be instropectable by South
|
Make the TagField be instropectable by South
|
Python
|
mit
|
avelino/django-tags
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
Make the TagField be instropectable by South
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^tags\.fields\.TagField"])
except:
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
<commit_msg>Make the TagField be instropectable by South<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^tags\.fields\.TagField"])
except:
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
Make the TagField be instropectable by South#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^tags\.fields\.TagField"])
except:
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
<commit_msg>Make the TagField be instropectable by South<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db.models.fields import CharField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from tags.models import Tag
@python_2_unicode_compatible
class TagField(CharField):
def __init__(self,
verbose_name=_('Tags'),
max_length=4000,
blank=True,
null=True,
help_text=_('A comma-separated list of tags.'),
**kwargs):
kwargs['max_length'] = max_length
kwargs['blank'] = blank
kwargs['null'] = null
kwargs['verbose_name'] = verbose_name
kwargs['help_text'] = help_text
self.max_length = max_length
self.blank = blank
self.null = null
self.verbose_name = verbose_name
self.help_text = help_text
CharField.__init__(self, **kwargs)
def pre_save(self, model_instance, add):
str_tags = getattr(model_instance, self.name)
if str_tags:
tags = set(str_tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
return ','.join(tags)
return super(TagField, self).pre_save(model_instance, add)
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^tags\.fields\.TagField"])
except:
pass
|
9177ffa65ac50026078610193b67fdfd6ac8358b
|
tests/app/utils/test_pagination.py
|
tests/app/utils/test_pagination.py
|
from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
ret = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in ret['url']
assert ret['title'] == 'Previous page'
assert ret['label'] == 'page 1'
def test_generate_next_dict(client):
ret = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in ret['url']
assert ret['title'] == 'Next page'
assert ret['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
ret = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in ret['url']
|
from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
result = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in result['url']
assert result['title'] == 'Previous page'
assert result['label'] == 'page 1'
def test_generate_next_dict(client):
result = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in result['url']
assert result['title'] == 'Next page'
assert result['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
result = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in result['url']
|
Clarify variable name in pagination tests
|
Clarify variable name in pagination tests
We should avoid using abbreviations, as they aren't universally
understood i.e. they're not worth the small saving in typing.
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
ret = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in ret['url']
assert ret['title'] == 'Previous page'
assert ret['label'] == 'page 1'
def test_generate_next_dict(client):
ret = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in ret['url']
assert ret['title'] == 'Next page'
assert ret['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
ret = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in ret['url']
Clarify variable name in pagination tests
We should avoid using abbreviations, as they aren't universally
understood i.e. they're not worth the small saving in typing.
|
from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
result = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in result['url']
assert result['title'] == 'Previous page'
assert result['label'] == 'page 1'
def test_generate_next_dict(client):
result = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in result['url']
assert result['title'] == 'Next page'
assert result['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
result = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in result['url']
|
<commit_before>from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
ret = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in ret['url']
assert ret['title'] == 'Previous page'
assert ret['label'] == 'page 1'
def test_generate_next_dict(client):
ret = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in ret['url']
assert ret['title'] == 'Next page'
assert ret['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
ret = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in ret['url']
<commit_msg>Clarify variable name in pagination tests
We should avoid using abbreviations, as they aren't universally
understood i.e. they're not worth the small saving in typing.<commit_after>
|
from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
result = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in result['url']
assert result['title'] == 'Previous page'
assert result['label'] == 'page 1'
def test_generate_next_dict(client):
result = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in result['url']
assert result['title'] == 'Next page'
assert result['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
result = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in result['url']
|
from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
ret = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in ret['url']
assert ret['title'] == 'Previous page'
assert ret['label'] == 'page 1'
def test_generate_next_dict(client):
ret = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in ret['url']
assert ret['title'] == 'Next page'
assert ret['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
ret = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in ret['url']
Clarify variable name in pagination tests
We should avoid using abbreviations, as they aren't universally
understood i.e. they're not worth the small saving in typing.from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
result = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in result['url']
assert result['title'] == 'Previous page'
assert result['label'] == 'page 1'
def test_generate_next_dict(client):
result = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in result['url']
assert result['title'] == 'Next page'
assert result['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
result = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in result['url']
|
<commit_before>from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
ret = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in ret['url']
assert ret['title'] == 'Previous page'
assert ret['label'] == 'page 1'
def test_generate_next_dict(client):
ret = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in ret['url']
assert ret['title'] == 'Next page'
assert ret['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
ret = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in ret['url']
<commit_msg>Clarify variable name in pagination tests
We should avoid using abbreviations, as they aren't universally
understood i.e. they're not worth the small saving in typing.<commit_after>from app.utils.pagination import generate_next_dict, generate_previous_dict
def test_generate_previous_dict(client):
result = generate_previous_dict('main.view_jobs', 'foo', 2, {})
assert 'page=1' in result['url']
assert result['title'] == 'Previous page'
assert result['label'] == 'page 1'
def test_generate_next_dict(client):
result = generate_next_dict('main.view_jobs', 'foo', 2, {})
assert 'page=3' in result['url']
assert result['title'] == 'Next page'
assert result['label'] == 'page 3'
def test_generate_previous_next_dict_adds_other_url_args(client):
result = generate_next_dict('main.view_notifications', 'foo', 2, {'message_type': 'blah'})
assert 'notifications/blah' in result['url']
|
c5cb9572b1917f68d71e8f3265f1312ccb99104d
|
plugins/worker/server/constants.py
|
plugins/worker/server/constants.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOLUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
|
Fix a misspelled variable name
|
Fix a misspelled variable name
This appears to be unused anywhere in the source code.
|
Python
|
apache-2.0
|
adsorensen/girder,adsorensen/girder,kotfic/girder,jbeezley/girder,Kitware/girder,manthey/girder,kotfic/girder,girder/girder,manthey/girder,Kitware/girder,kotfic/girder,RafaelPalomar/girder,RafaelPalomar/girder,manthey/girder,data-exp-lab/girder,girder/girder,kotfic/girder,Kitware/girder,Xarthisius/girder,data-exp-lab/girder,manthey/girder,girder/girder,RafaelPalomar/girder,Xarthisius/girder,RafaelPalomar/girder,girder/girder,adsorensen/girder,Xarthisius/girder,Xarthisius/girder,data-exp-lab/girder,adsorensen/girder,jbeezley/girder,RafaelPalomar/girder,jbeezley/girder,data-exp-lab/girder,data-exp-lab/girder,Xarthisius/girder,jbeezley/girder,adsorensen/girder,Kitware/girder,kotfic/girder
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
Fix a misspelled variable name
This appears to be unused anywhere in the source code.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOLUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
<commit_msg>Fix a misspelled variable name
This appears to be unused anywhere in the source code.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOLUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
Fix a misspelled variable name
This appears to be unused anywhere in the source code.#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOLUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
<commit_msg>Fix a misspelled variable name
This appears to be unused anywhere in the source code.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# The path that will be mounted in docker containers for data IO
DOCKER_DATA_VOLUME = '/mnt/girder_worker/data'
# The path that will be mounted in docker containers for utility scripts
DOCKER_SCRIPTS_VOLUME = '/mnt/girder_worker/scripts'
# Settings where plugin information is stored
class PluginSettings(object):
BROKER = 'worker.broker'
BACKEND = 'worker.backend'
API_URL = 'worker.api_url'
|
1d3cac57c08baaac590d16497a146c0759e4a2da
|
led_flash.py
|
led_flash.py
|
# Basic script to toggle the LED state on pin 4
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(4)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
|
# Basic script to toggle the LED state on GPIO pin 7
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(7)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
|
Switch LED GPIO pin from 4 to 7
|
Switch LED GPIO pin from 4 to 7
|
Python
|
mit
|
barecode/iot_temp_sensors,barecode/iot_temp_sensors,barecode/iot_temp_sensors
|
# Basic script to toggle the LED state on pin 4
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(4)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
Switch LED GPIO pin from 4 to 7
|
# Basic script to toggle the LED state on GPIO pin 7
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(7)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
|
<commit_before># Basic script to toggle the LED state on pin 4
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(4)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
<commit_msg>Switch LED GPIO pin from 4 to 7<commit_after>
|
# Basic script to toggle the LED state on GPIO pin 7
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(7)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
|
# Basic script to toggle the LED state on pin 4
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(4)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
Switch LED GPIO pin from 4 to 7# Basic script to toggle the LED state on GPIO pin 7
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(7)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
|
<commit_before># Basic script to toggle the LED state on pin 4
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(4)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
<commit_msg>Switch LED GPIO pin from 4 to 7<commit_after># Basic script to toggle the LED state on GPIO pin 7
# For Raspberry Pi pin mapping, see:
# https://www.raspberrypi.org/documentation/usage/gpio/README.md
#
# Insipired by:
# https://www.raspberrypi.org/learning/python-quick-reaction-game/worksheet/
#
from gpiozero import LED
from time import sleep
flag = True
led = LED(7)
while True:
if flag:
print("on");
led.on()
else:
print("off");
led.off()
flag = not flag
sleep(1)
|
2e8f42c0b5eb018309d965b01659c496bc08a08b
|
quickstart/python/understand/example-1/update_initial_intent.6.x.py
|
quickstart/python/understand/example-1/update_initial_intent.6.x.py
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
assistant_sid = 'UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
client.preview.understand \
.assistants(assistant_sid) \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/assistant/list
client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
|
Update intent actions to use assistant SID inline
|
Update intent actions to use assistant SID inline
Maintaining consistency with the auto-generated code samples for Understand, which
don't allow for our variable-named placeholder values
|
Python
|
mit
|
TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
assistant_sid = 'UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
client.preview.understand \
.assistants(assistant_sid) \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
Update intent actions to use assistant SID inline
Maintaining consistency with the auto-generated code samples for Understand, which
don't allow for our variable-named placeholder values
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/assistant/list
client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
|
<commit_before># Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
assistant_sid = 'UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
client.preview.understand \
.assistants(assistant_sid) \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
<commit_msg>Update intent actions to use assistant SID inline
Maintaining consistency with the auto-generated code samples for Understand, which
don't allow for our variable-named placeholder values<commit_after>
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/assistant/list
client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
assistant_sid = 'UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
client.preview.understand \
.assistants(assistant_sid) \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
Update intent actions to use assistant SID inline
Maintaining consistency with the auto-generated code samples for Understand, which
don't allow for our variable-named placeholder values# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/assistant/list
client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
|
<commit_before># Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
assistant_sid = 'UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
client.preview.understand \
.assistants(assistant_sid) \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
<commit_msg>Update intent actions to use assistant SID inline
Maintaining consistency with the auto-generated code samples for Understand, which
don't allow for our variable-named placeholder values<commit_after># Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Provide actions for your assistant: say something and listen for a repsonse.
update_action = {
'actions': [
{'say': 'Hi there, I\'m your virtual assistant! How can I help you?'},
{'listen': True}
]
}
# Update the default intent to use your new actions.
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/assistant/list
client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.intents('hello-world') \
.intent_actions().update(update_action)
print("Intent actions updated")
|
af42a9c9983dbaab80945d481570d9bf11a22d3a
|
tweetGenerator/webserver.py
|
tweetGenerator/webserver.py
|
from http.server import BaseHTTPRequestHandler, HTTPServer
import response
# HTTPRequestHandler class
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'text/html')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
|
from http.server import BaseHTTPRequestHandler, HTTPServer
import response
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'application/json')
self.send_header('Connection', 'close')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
|
Add Connection: close to header
|
Add Connection: close to header
|
Python
|
mit
|
ratorx/hc-2,ratorx/hc-2,ratorx/hc-2,ratorx/hc-2
|
from http.server import BaseHTTPRequestHandler, HTTPServer
import response
# HTTPRequestHandler class
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'text/html')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
Add Connection: close to header
|
from http.server import BaseHTTPRequestHandler, HTTPServer
import response
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'application/json')
self.send_header('Connection', 'close')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
|
<commit_before>from http.server import BaseHTTPRequestHandler, HTTPServer
import response
# HTTPRequestHandler class
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'text/html')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
<commit_msg>Add Connection: close to header<commit_after>
|
from http.server import BaseHTTPRequestHandler, HTTPServer
import response
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'application/json')
self.send_header('Connection', 'close')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
|
from http.server import BaseHTTPRequestHandler, HTTPServer
import response
# HTTPRequestHandler class
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'text/html')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
Add Connection: close to headerfrom http.server import BaseHTTPRequestHandler, HTTPServer
import response
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'application/json')
self.send_header('Connection', 'close')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
|
<commit_before>from http.server import BaseHTTPRequestHandler, HTTPServer
import response
# HTTPRequestHandler class
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'text/html')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
<commit_msg>Add Connection: close to header<commit_after>from http.server import BaseHTTPRequestHandler, HTTPServer
import response
ret = ""
class ServerHandler(BaseHTTPRequestHandler):
def do_GET(self):
global ret
# Send response status code
self.send_response(200)
# Send headers
self.send_header('Content-type', 'application/json')
self.send_header('Connection', 'close')
self.end_headers()
# Write content as utf-8 data
self.wfile.write(bytes(ret, "utf8"))
ret = response.get_json()
return
def run():
global ret
print('starting server...')
ret = response.get_json()
# Server settings
server_address = ('127.0.0.1', 3001)
httpd = HTTPServer(server_address, ServerHandler)
print('running server...')
httpd.serve_forever()
run()
|
ff272cffbe70f2e306c841fd33424bb009e79ccf
|
conf_site/proposals/urls.py
|
conf_site/proposals/urls.py
|
from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
|
from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"submitters/export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
|
Change location of proposal submitters CSV export.
|
Change location of proposal submitters CSV export.
"/proposals/submitters/export/" is a more accurate URL than
/proposals/export/", since the CSV file contains information about the
people submitting proposals and not the proposals themselves.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
Change location of proposal submitters CSV export.
"/proposals/submitters/export/" is a more accurate URL than
/proposals/export/", since the CSV file contains information about the
people submitting proposals and not the proposals themselves.
|
from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"submitters/export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
|
<commit_before>from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
<commit_msg>Change location of proposal submitters CSV export.
"/proposals/submitters/export/" is a more accurate URL than
/proposals/export/", since the CSV file contains information about the
people submitting proposals and not the proposals themselves.<commit_after>
|
from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"submitters/export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
|
from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
Change location of proposal submitters CSV export.
"/proposals/submitters/export/" is a more accurate URL than
/proposals/export/", since the CSV file contains information about the
people submitting proposals and not the proposals themselves.from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"submitters/export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
|
<commit_before>from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
<commit_msg>Change location of proposal submitters CSV export.
"/proposals/submitters/export/" is a more accurate URL than
/proposals/export/", since the CSV file contains information about the
people submitting proposals and not the proposals themselves.<commit_after>from django.conf.urls import include
from django.contrib.admin.views.decorators import staff_member_required
from django.urls import path
from conf_site.proposals.views import ExportProposalSubmittersView
urlpatterns = [
path(
"submitters/export/",
staff_member_required(ExportProposalSubmittersView.as_view()),
name="proposal_submitter_export",
),
path("", include("symposion.proposals.urls")),
]
|
c254bf20bc8b4b7c73e3361d3666fb3733dbc09f
|
pycroscopy/processing/__init__.py
|
pycroscopy/processing/__init__.py
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
# from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
# from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
#
# FeatureExtractor = FeatureExtractorSerial
# geoTransformer = geoTransformerSerial
#
# try:
# import multiprocess
# except ImportError:
# FeatureExtractorParallel = no_impl
# geoTransformerParallel = no_impl
# else:
# FeatureExtractor = FeatureExtractorParallel
# geoTransformer = geoTransformerParallel
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
FeatureExtractor = FeatureExtractorSerial
geoTransformer = geoTransformerSerial
try:
import multiprocess
except ImportError:
FeatureExtractorParallel = no_impl
geoTransformerParallel = no_impl
else:
FeatureExtractor = FeatureExtractorParallel
geoTransformer = geoTransformerParallel
|
Revert "Commented out unimplemented imports"
|
Revert "Commented out unimplemented imports"
This reverts commit f6b76db8f963d28c0a9f2875139d5e286e3bd01b.
|
Python
|
mit
|
pycroscopy/pycroscopy,anugrah-saxena/pycroscopy
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
# from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
# from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
#
# FeatureExtractor = FeatureExtractorSerial
# geoTransformer = geoTransformerSerial
#
# try:
# import multiprocess
# except ImportError:
# FeatureExtractorParallel = no_impl
# geoTransformerParallel = no_impl
# else:
# FeatureExtractor = FeatureExtractorParallel
# geoTransformer = geoTransformerParallel
Revert "Commented out unimplemented imports"
This reverts commit f6b76db8f963d28c0a9f2875139d5e286e3bd01b.
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
FeatureExtractor = FeatureExtractorSerial
geoTransformer = geoTransformerSerial
try:
import multiprocess
except ImportError:
FeatureExtractorParallel = no_impl
geoTransformerParallel = no_impl
else:
FeatureExtractor = FeatureExtractorParallel
geoTransformer = geoTransformerParallel
|
<commit_before>import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
# from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
# from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
#
# FeatureExtractor = FeatureExtractorSerial
# geoTransformer = geoTransformerSerial
#
# try:
# import multiprocess
# except ImportError:
# FeatureExtractorParallel = no_impl
# geoTransformerParallel = no_impl
# else:
# FeatureExtractor = FeatureExtractorParallel
# geoTransformer = geoTransformerParallel
<commit_msg>Revert "Commented out unimplemented imports"
This reverts commit f6b76db8f963d28c0a9f2875139d5e286e3bd01b.<commit_after>
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
FeatureExtractor = FeatureExtractorSerial
geoTransformer = geoTransformerSerial
try:
import multiprocess
except ImportError:
FeatureExtractorParallel = no_impl
geoTransformerParallel = no_impl
else:
FeatureExtractor = FeatureExtractorParallel
geoTransformer = geoTransformerParallel
|
import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
# from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
# from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
#
# FeatureExtractor = FeatureExtractorSerial
# geoTransformer = geoTransformerSerial
#
# try:
# import multiprocess
# except ImportError:
# FeatureExtractorParallel = no_impl
# geoTransformerParallel = no_impl
# else:
# FeatureExtractor = FeatureExtractorParallel
# geoTransformer = geoTransformerParallel
Revert "Commented out unimplemented imports"
This reverts commit f6b76db8f963d28c0a9f2875139d5e286e3bd01b.import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
FeatureExtractor = FeatureExtractorSerial
geoTransformer = geoTransformerSerial
try:
import multiprocess
except ImportError:
FeatureExtractorParallel = no_impl
geoTransformerParallel = no_impl
else:
FeatureExtractor = FeatureExtractorParallel
geoTransformer = geoTransformerParallel
|
<commit_before>import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
# from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
# from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
#
# FeatureExtractor = FeatureExtractorSerial
# geoTransformer = geoTransformerSerial
#
# try:
# import multiprocess
# except ImportError:
# FeatureExtractorParallel = no_impl
# geoTransformerParallel = no_impl
# else:
# FeatureExtractor = FeatureExtractorParallel
# geoTransformer = geoTransformerParallel
<commit_msg>Revert "Commented out unimplemented imports"
This reverts commit f6b76db8f963d28c0a9f2875139d5e286e3bd01b.<commit_after>import fft
import gmode_utils
import cluster
import proc_utils
import decomposition
def no_impl(*args,**kwargs):
raise NotImplementedError("You need to install Multiprocess package (pip,github) to do a parallel Computation.\n"
"Switching to the serial version. ")
from .feature_extraction import FeatureExtractorParallel, FeatureExtractorSerial
from .geometric_transformation import geoTransformerParallel, geoTransformerSerial
FeatureExtractor = FeatureExtractorSerial
geoTransformer = geoTransformerSerial
try:
import multiprocess
except ImportError:
FeatureExtractorParallel = no_impl
geoTransformerParallel = no_impl
else:
FeatureExtractor = FeatureExtractorParallel
geoTransformer = geoTransformerParallel
|
be1b1de45b93b5c72d6d76667430a6be4c56fb75
|
vsmomi/_service_instance.py
|
vsmomi/_service_instance.py
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
# disable SSL verification
__get = requests.get
def getNoSLL(*args, **kwargs):
kwargs["verify"] = False
return __get(*args, **kwargs)
requests.get = getNoSLL
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
|
Disable SSL verification in requests.get
|
Disable SSL verification in requests.get
|
Python
|
apache-2.0
|
dahuebi/vsmomi,dahuebi/vsmomi
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
Disable SSL verification in requests.get
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
# disable SSL verification
__get = requests.get
def getNoSLL(*args, **kwargs):
kwargs["verify"] = False
return __get(*args, **kwargs)
requests.get = getNoSLL
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
<commit_msg>Disable SSL verification in requests.get<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
# disable SSL verification
__get = requests.get
def getNoSLL(*args, **kwargs):
kwargs["verify"] = False
return __get(*args, **kwargs)
requests.get = getNoSLL
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
Disable SSL verification in requests.get# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
# disable SSL verification
__get = requests.get
def getNoSLL(*args, **kwargs):
kwargs["verify"] = False
return __get(*args, **kwargs)
requests.get = getNoSLL
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
<commit_msg>Disable SSL verification in requests.get<commit_after># -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from future.builtins.disabled import *
import base64
import atexit
import requests
# disable warnings
try:
requests.packages.urllib3.disable_warnings()
except AttributeError:
pass
# disable SSL verification
__get = requests.get
def getNoSLL(*args, **kwargs):
kwargs["verify"] = False
return __get(*args, **kwargs)
requests.get = getNoSLL
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
class ServiceInstance(object):
def __init__(self, vcenter, username, password):
self.si = None
self.vcenter = vcenter
self.username = username
self.password = password
self.__connect()
def __connect(self):
connect = True
if self.si:
# check connection
try:
self.si.CurrentTime()
connect = False
except vim.fault.NotAuthenticated:
# timeout
pass
if connect:
si = None
try:
pwd = base64.b64decode(self.password).decode("utf-8")
si = SmartConnect(
host=self.vcenter,
user=self.username,
pwd=pwd,
port=443)
except IOError:
raise
if self.si is None:
atexit.register(Disconnect, self.si)
else:
Disconnect(self.si)
self.si = si
def __getattr__(self, name):
self.__connect()
return getattr(self.si, name)
|
b2482545448f36880ca444f2604812e285bc67da
|
tests/plots.py
|
tests/plots.py
|
import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
|
import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
from pyquante.graphics.contourplot import test_contour
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def contour_orb(): return test_contour(True)
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
|
Test routine for contour plotting
|
Test routine for contour plotting
|
Python
|
bsd-3-clause
|
Konjkov/pyquante2,Konjkov/pyquante2,Konjkov/pyquante2
|
import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
Test routine for contour plotting
|
import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
from pyquante.graphics.contourplot import test_contour
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def contour_orb(): return test_contour(True)
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
|
<commit_before>import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
<commit_msg>Test routine for contour plotting<commit_after>
|
import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
from pyquante.graphics.contourplot import test_contour
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def contour_orb(): return test_contour(True)
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
|
import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
Test routine for contour plottingimport numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
from pyquante.graphics.contourplot import test_contour
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def contour_orb(): return test_contour(True)
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
|
<commit_before>import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
<commit_msg>Test routine for contour plotting<commit_after>import numpy as np
from pyquante2 import basisset,rhf,h2
from pyquante2.graphics.vtk import vtk_orbital
from pyquante.graphics.lineplot import test_plot_orbs,test_plot_bfs
from pyquante.graphics.contourplot import test_contour
def lineplot_orbs(): return test_plot_orbs()
def lineplot_bfs(): return test_plot_bfs()
def contour_orb(): return test_contour(True)
def plot_h2():
bfs = basisset(h2,'sto3g')
solver = rhf(h2,bfs)
ens = solver.converge()
# Note: these orbitals are not coming out symmetric. Why not??
print solver
print solver.orbs
vtk_orbital(h2,solver.orbs,bfs)
def plot_orbs():
bfs = basisset(h2,'sto3g')
orbs = np.array([[1.0,1.0],
[1.0,-1.0]],'d')
vtk_orbital(h2,orbs,bfs)
return
if __name__ == '__main__':
plot_h2()
|
3d95d4e69e927e6f21ebad1b9730142df19eeef7
|
my_button.py
|
my_button.py
|
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
a_m.instance.click()
|
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
if not hasattr(self, "silent"):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
if not hasattr(self, "silent"):
a_m.instance.click()
|
Allow buttons to be silenced
|
Allow buttons to be silenced
|
Python
|
mit
|
cropleyb/pentai,cropleyb/pentai,cropleyb/pentai
|
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
a_m.instance.click()
Allow buttons to be silenced
|
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
if not hasattr(self, "silent"):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
if not hasattr(self, "silent"):
a_m.instance.click()
|
<commit_before>
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
a_m.instance.click()
<commit_msg>Allow buttons to be silenced<commit_after>
|
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
if not hasattr(self, "silent"):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
if not hasattr(self, "silent"):
a_m.instance.click()
|
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
a_m.instance.click()
Allow buttons to be silenced
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
if not hasattr(self, "silent"):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
if not hasattr(self, "silent"):
a_m.instance.click()
|
<commit_before>
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
a_m.instance.click()
<commit_msg>Allow buttons to be silenced<commit_after>
from kivy.uix.button import Button
import audio as a_m
class MyButton(Button):
def on_touch_up(self, *args, **kwargs):
if not hasattr(self, "silent"):
a_m.instance.click()
super(MyButton, self).on_touch_up(*args, **kwargs)
def sim_press(self):
self.state = "down"
def sim_release(self):
self.state = "normal"
if not hasattr(self, "silent"):
a_m.instance.click()
|
b849219c476db73e11ba40900d5a558a7b3e7759
|
net/utils.py
|
net/utils.py
|
def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
pass
|
import time
def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
time.sleep(0.5)
pass
|
Add pause during looping retry
|
Add pause during looping retry
|
Python
|
mit
|
OpenBazaar/Network,tyler-smith/OpenBazaar-Server,tyler-smith/OpenBazaar-Server,OpenBazaar/Network,tomgalloway/OpenBazaar-Server,cpacia/OpenBazaar-Server,cpacia/OpenBazaar-Server,OpenBazaar/OpenBazaar-Server,saltduck/OpenBazaar-Server,saltduck/OpenBazaar-Server,tyler-smith/OpenBazaar-Server,saltduck/OpenBazaar-Server,cpacia/OpenBazaar-Server,OpenBazaar/OpenBazaar-Server,tomgalloway/OpenBazaar-Server,OpenBazaar/Network,OpenBazaar/OpenBazaar-Server,tomgalloway/OpenBazaar-Server
|
def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
pass
Add pause during looping retry
|
import time
def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
time.sleep(0.5)
pass
|
<commit_before>def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
pass
<commit_msg>Add pause during looping retry<commit_after>
|
import time
def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
time.sleep(0.5)
pass
|
def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
pass
Add pause during looping retryimport time
def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
time.sleep(0.5)
pass
|
<commit_before>def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
pass
<commit_msg>Add pause during looping retry<commit_after>import time
def looping_retry(func, *args):
while True:
try:
return func(*args)
except Exception:
time.sleep(0.5)
pass
|
3b8a54f2ce220de26741aa329ebb45ceeb3b99c5
|
external_file_location/__manifest__.py
|
external_file_location/__manifest__.py
|
# coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
# coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),'
'ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
Fix line lenght in manifest
|
Fix line lenght in manifest
|
Python
|
agpl-3.0
|
thinkopensolutions/server-tools,thinkopensolutions/server-tools
|
# coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
Fix line lenght in manifest
|
# coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),'
'ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
<commit_before># coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
<commit_msg>Fix line lenght in manifest<commit_after>
|
# coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),'
'ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
# coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
Fix line lenght in manifest# coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),'
'ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
<commit_before># coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
<commit_msg>Fix line lenght in manifest<commit_after># coding: utf-8
# @ 2016 florian DA COSTA @ Akretion
# © 2016 @author Mourad EL HADJ MIMOUNE <mourad.elhadj.mimoune@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'External File Location',
'version': '10.0.1.0.0',
'author': 'Akretion,Odoo Community Association (OCA),'
'ThinkOpen Solutions Brasil',
'website': 'http://www.akretion.com/',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': [
'attachment_base_synchronize',
],
'external_dependencies': {
'python': [
'fs',
'paramiko',
],
},
'data': [
'views/menu.xml',
'views/attachment_view.xml',
'views/location_view.xml',
'views/task_view.xml',
'data/cron.xml',
'security/ir.model.access.csv',
],
'demo': [
'demo/task_demo.xml',
],
'installable': True,
'application': False,
}
|
31c8b251d5dd1df220fdae874a1715743d001407
|
migrations/versions/5981b26ae993_drop_trackman_tables.py
|
migrations/versions/5981b26ae993_drop_trackman_tables.py
|
"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('djset')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
|
"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('set')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
|
Fix typo: the table name is set, not djset
|
Fix typo: the table name is set, not djset
|
Python
|
agpl-3.0
|
wuvt/wuvt-site,wuvt/wuvt-site,wuvt/wuvt-site,wuvt/wuvt-site
|
"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('djset')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
Fix typo: the table name is set, not djset
|
"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('set')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
|
<commit_before>"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('djset')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
<commit_msg>Fix typo: the table name is set, not djset<commit_after>
|
"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('set')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
|
"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('djset')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
Fix typo: the table name is set, not djset"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('set')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
|
<commit_before>"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('djset')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
<commit_msg>Fix typo: the table name is set, not djset<commit_after>"""Drop Trackman tables
Revision ID: 5981b26ae993
Revises: 804fb3dc434f
Create Date: 2018-05-19 23:57:42.897891
"""
# revision identifiers, used by Alembic.
revision = '5981b26ae993'
down_revision = '804fb3dc434f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('air_log')
op.drop_table('tracklog')
op.drop_table('trackreport')
op.drop_table('track')
op.drop_table('set')
op.drop_table('dj')
op.drop_table('rotation')
def downgrade():
raise Exception("Downgrade to previous versions is unsupported.")
|
6ac67683c1aea8578d1b9b5ad9d41280d6789f58
|
schematics/types/temporal.py
|
schematics/types/temporal.py
|
from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def __set__(self, instance, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = TimeStampType.timestamp_to_date(value)
except TypeError:
pass
super(TimeStampType, self).__set__(instance, value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
|
from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def convert(self, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = float(value)
return TimeStampType.timestamp_to_date(value)
except (TypeError, ValueError):
pass
return super(TimeStampType, self).convert(value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
|
Fix TimeStampType to use convert method
|
Fix TimeStampType to use convert method
|
Python
|
bsd-3-clause
|
nKey/schematics
|
from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def __set__(self, instance, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = TimeStampType.timestamp_to_date(value)
except TypeError:
pass
super(TimeStampType, self).__set__(instance, value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
Fix TimeStampType to use convert method
|
from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def convert(self, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = float(value)
return TimeStampType.timestamp_to_date(value)
except (TypeError, ValueError):
pass
return super(TimeStampType, self).convert(value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
|
<commit_before>from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def __set__(self, instance, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = TimeStampType.timestamp_to_date(value)
except TypeError:
pass
super(TimeStampType, self).__set__(instance, value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
<commit_msg>Fix TimeStampType to use convert method<commit_after>
|
from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def convert(self, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = float(value)
return TimeStampType.timestamp_to_date(value)
except (TypeError, ValueError):
pass
return super(TimeStampType, self).convert(value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
|
from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def __set__(self, instance, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = TimeStampType.timestamp_to_date(value)
except TypeError:
pass
super(TimeStampType, self).__set__(instance, value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
Fix TimeStampType to use convert methodfrom __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def convert(self, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = float(value)
return TimeStampType.timestamp_to_date(value)
except (TypeError, ValueError):
pass
return super(TimeStampType, self).convert(value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
|
<commit_before>from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def __set__(self, instance, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = TimeStampType.timestamp_to_date(value)
except TypeError:
pass
super(TimeStampType, self).__set__(instance, value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
<commit_msg>Fix TimeStampType to use convert method<commit_after>from __future__ import absolute_import
import datetime
from time import mktime
try:
from dateutil.tz import tzutc, tzlocal
except ImportError:
raise ImportError(
'Using the datetime fields requires the dateutil library. '
'You can obtain dateutil from http://labix.org/python-dateutil'
)
from .base import DateTimeType
class TimeStampType(DateTimeType):
"""Variant of a datetime field that saves itself as a unix timestamp (int)
instead of a ISO-8601 string.
"""
def convert(self, value):
"""Will try to parse the value as a timestamp. If that fails it
will fallback to DateTimeType's value parsing.
A datetime may be used (and is encouraged).
"""
if not value:
return
try:
value = float(value)
return TimeStampType.timestamp_to_date(value)
except (TypeError, ValueError):
pass
return super(TimeStampType, self).convert(value)
@classmethod
def timestamp_to_date(cls, value):
return datetime.datetime.fromtimestamp(value, tz=tzutc())
@classmethod
def date_to_timestamp(cls, value):
if value.tzinfo is None:
value = value.replace(tzinfo=tzlocal())
return int(round(mktime(value.astimezone(tzutc()).timetuple())))
def to_primitive(self, value):
v = TimeStampType.date_to_timestamp(value)
return v
|
ec775abe37ee6d7965e7a30ff36accec5a8dc73c
|
python/misc/functions.py
|
python/misc/functions.py
|
#!/usr/bin/env python
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
|
#!/usr/bin/env python
import socket
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
# Can be used to test connectivity if telnet isn't installed (https://stackoverflow.com/a/33117579/399105)
def test_connectivity(host, port, timeout=3):
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
return True
except Exception as ex:
print(ex.message)
return False
|
Add function to test network connectivity
|
Add function to test network connectivity
|
Python
|
mit
|
bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile,bmaupin/junkpile
|
#!/usr/bin/env python
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
Add function to test network connectivity
|
#!/usr/bin/env python
import socket
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
# Can be used to test connectivity if telnet isn't installed (https://stackoverflow.com/a/33117579/399105)
def test_connectivity(host, port, timeout=3):
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
return True
except Exception as ex:
print(ex.message)
return False
|
<commit_before>#!/usr/bin/env python
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
<commit_msg>Add function to test network connectivity<commit_after>
|
#!/usr/bin/env python
import socket
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
# Can be used to test connectivity if telnet isn't installed (https://stackoverflow.com/a/33117579/399105)
def test_connectivity(host, port, timeout=3):
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
return True
except Exception as ex:
print(ex.message)
return False
|
#!/usr/bin/env python
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
Add function to test network connectivity#!/usr/bin/env python
import socket
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
# Can be used to test connectivity if telnet isn't installed (https://stackoverflow.com/a/33117579/399105)
def test_connectivity(host, port, timeout=3):
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
return True
except Exception as ex:
print(ex.message)
return False
|
<commit_before>#!/usr/bin/env python
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
<commit_msg>Add function to test network connectivity<commit_after>#!/usr/bin/env python
import socket
def convert_filetime_to_epoch(filetime):
return (filetime / 10000000) - 11644473600
# Can be used to test connectivity if telnet isn't installed (https://stackoverflow.com/a/33117579/399105)
def test_connectivity(host, port, timeout=3):
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
return True
except Exception as ex:
print(ex.message)
return False
|
2f4365d1d8c54f4ced852ffe9824fc530ac14862
|
{{cookiecutter.repo_name}}/tests/test_{{cookiecutter.repo_name}}.py
|
{{cookiecutter.repo_name}}/tests/test_{{cookiecutter.repo_name}}.py
|
# -*- coding: utf-8 -*-
import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
# -*- coding: utf-8 -*-
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
Fix flake8 in app test
|
Fix flake8 in app test
|
Python
|
mit
|
hackebrot/cookiedozer,hackebrot/cookiedozer
|
# -*- coding: utf-8 -*-
import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
Fix flake8 in app test
|
# -*- coding: utf-8 -*-
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
<commit_before># -*- coding: utf-8 -*-
import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
<commit_msg>Fix flake8 in app test<commit_after>
|
# -*- coding: utf-8 -*-
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
# -*- coding: utf-8 -*-
import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
Fix flake8 in app test# -*- coding: utf-8 -*-
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
<commit_before># -*- coding: utf-8 -*-
import pytest
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
<commit_msg>Fix flake8 in app test<commit_after># -*- coding: utf-8 -*-
def test_app_title(app):
"""Simply tests if the default app title meets the expectations.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the title does not match
"""
assert app.title == '{{cookiecutter.app_title}}'
def test_carousel(app):
"""Test for the carousel widget of the app checking the slides' names.
Args:
app (:class:`{{cookiecutter.app_class_name}}`): Default app instance
Raises:
AssertionError: If the names of the slides do not match the expectations
"""
names = [slide.name for slide in app.carousel.slides]
expected = ['hello', 'kivy', 'cookiecutterdozer', 'license', 'github']
assert names == expected
|
319af7294f85ec8476a0cd1bda0095b59b0b0324
|
dpam/backends.py
|
dpam/backends.py
|
import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
SERVICE = getattr(settings, 'PAM_SERVICE', 'login')
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password, service=service):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
Add the PAM_SERVICE setting to select a custom pam service for authentication
|
Add the PAM_SERVICE setting to select a custom pam service for authentication
|
Python
|
bsd-2-clause
|
JustinAzoff/django-pam,tehmaze/django-pam,keobox/django-pam,kurojishi/django-pam
|
import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Add the PAM_SERVICE setting to select a custom pam service for authentication
|
import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
SERVICE = getattr(settings, 'PAM_SERVICE', 'login')
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password, service=service):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Add the PAM_SERVICE setting to select a custom pam service for authentication<commit_after>
|
import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
SERVICE = getattr(settings, 'PAM_SERVICE', 'login')
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password, service=service):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Add the PAM_SERVICE setting to select a custom pam service for authenticationimport pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
SERVICE = getattr(settings, 'PAM_SERVICE', 'login')
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password, service=service):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Add the PAM_SERVICE setting to select a custom pam service for authentication<commit_after>import pam
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class PAMBackend(ModelBackend):
SERVICE = getattr(settings, 'PAM_SERVICE', 'login')
def authenticate(self, username=None, password=None):
if pam.authenticate(username, password, service=service):
try:
user = User.objects.get(username=username)
except:
user = User(username=username, password='not stored here')
user.set_unusable_password()
if getattr(settings, 'PAM_IS_SUPERUSER', False):
user.is_superuser = True
if getattr(settings, 'PAM_IS_STAFF', user.is_superuser):
user.is_staff = True
user.save()
return user
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
9e47c145c705351748399eb64a7686efc9e24b0a
|
tests/basics/memoryview2.py
|
tests/basics/memoryview2.py
|
# test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
|
# test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
# these constructors give an internal overflow in uPy
#print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
#print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
|
Disable memoryview tests that overflow int conversion.
|
tests: Disable memoryview tests that overflow int conversion.
They fail on builds with 32-bit word size.
|
Python
|
mit
|
SHA2017-badge/micropython-esp32,deshipu/micropython,pozetroninc/micropython,hosaka/micropython,pozetroninc/micropython,blazewicz/micropython,drrk/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,blazewicz/micropython,pramasoul/micropython,alex-robbins/micropython,swegener/micropython,ryannathans/micropython,alex-march/micropython,bvernoux/micropython,pfalcon/micropython,trezor/micropython,Peetz0r/micropython-esp32,micropython/micropython-esp32,henriknelson/micropython,tralamazza/micropython,adafruit/circuitpython,selste/micropython,trezor/micropython,ganshun666/micropython,dinau/micropython,deshipu/micropython,chrisdearman/micropython,PappaPeppar/micropython,adafruit/micropython,tuc-osg/micropython,MrSurly/micropython-esp32,redbear/micropython,dxxb/micropython,turbinenreiter/micropython,henriknelson/micropython,cwyark/micropython,tobbad/micropython,alex-march/micropython,emfcamp/micropython,henriknelson/micropython,misterdanb/micropython,infinnovation/micropython,dxxb/micropython,puuu/micropython,MrSurly/micropython,adafruit/circuitpython,turbinenreiter/micropython,lowRISC/micropython,micropython/micropython-esp32,SHA2017-badge/micropython-esp32,tobbad/micropython,torwag/micropython,emfcamp/micropython,MrSurly/micropython-esp32,cwyark/micropython,adafruit/circuitpython,selste/micropython,Timmenem/micropython,torwag/micropython,SHA2017-badge/micropython-esp32,adafruit/circuitpython,cwyark/micropython,tuc-osg/micropython,HenrikSolver/micropython,jmarcelino/pycom-micropython,chrisdearman/micropython,matthewelse/micropython,lowRISC/micropython,alex-march/micropython,dxxb/micropython,TDAbboud/micropython,lowRISC/micropython,MrSurly/micropython,puuu/micropython,hosaka/micropython,dmazzella/micropython,toolmacher/micropython,dinau/micropython,MrSurly/micropython,infinnovation/micropython,bvernoux/micropython,deshipu/micropython,dmazzella/micropython,henriknelson/micropython,TDAbboud/micropython,adafruit/micropython,drrk/micropython,kerneltask/micropython,mhoffma/
micropython,mpalomer/micropython,hiway/micropython,matthewelse/micropython,turbinenreiter/micropython,Peetz0r/micropython-esp32,torwag/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,drrk/micropython,hosaka/micropython,dxxb/micropython,TDAbboud/micropython,misterdanb/micropython,praemdonck/micropython,alex-robbins/micropython,toolmacher/micropython,oopy/micropython,misterdanb/micropython,chrisdearman/micropython,PappaPeppar/micropython,tuc-osg/micropython,PappaPeppar/micropython,toolmacher/micropython,Timmenem/micropython,deshipu/micropython,infinnovation/micropython,cwyark/micropython,adafruit/micropython,redbear/micropython,jmarcelino/pycom-micropython,alex-march/micropython,kerneltask/micropython,tuc-osg/micropython,oopy/micropython,ganshun666/micropython,alex-robbins/micropython,selste/micropython,redbear/micropython,tuc-osg/micropython,AriZuu/micropython,emfcamp/micropython,MrSurly/micropython-esp32,puuu/micropython,matthewelse/micropython,selste/micropython,HenrikSolver/micropython,ryannathans/micropython,cwyark/micropython,oopy/micropython,oopy/micropython,tobbad/micropython,trezor/micropython,adafruit/micropython,micropython/micropython-esp32,chrisdearman/micropython,adafruit/micropython,praemdonck/micropython,selste/micropython,jmarcelino/pycom-micropython,jmarcelino/pycom-micropython,matthewelse/micropython,hosaka/micropython,pfalcon/micropython,mhoffma/micropython,praemdonck/micropython,pramasoul/micropython,AriZuu/micropython,oopy/micropython,blazewicz/micropython,bvernoux/micropython,HenrikSolver/micropython,deshipu/micropython,hiway/micropython,blazewicz/micropython,SHA2017-badge/micropython-esp32,mhoffma/micropython,redbear/micropython,praemdonck/micropython,dxxb/micropython,mpalomer/micropython,pozetroninc/micropython,praemdonck/micropython,tobbad/micropython,chrisdearman/micropython,TDAbboud/micropython,torwag/micropython,MrSurly/micropython-esp32,AriZuu/micropython,emfcamp/micropython,lowRISC/micropython,alex-robbins/micropython,redbear/
micropython,toolmacher/micropython,kerneltask/micropython,matthewelse/micropython,tobbad/micropython,micropython/micropython-esp32,ganshun666/micropython,drrk/micropython,pfalcon/micropython,pfalcon/micropython,adafruit/circuitpython,HenrikSolver/micropython,dinau/micropython,ryannathans/micropython,dinau/micropython,matthewelse/micropython,dinau/micropython,hiway/micropython,emfcamp/micropython,torwag/micropython,pozetroninc/micropython,mhoffma/micropython,SHA2017-badge/micropython-esp32,dmazzella/micropython,mpalomer/micropython,lowRISC/micropython,AriZuu/micropython,MrSurly/micropython,tralamazza/micropython,puuu/micropython,pfalcon/micropython,MrSurly/micropython,ganshun666/micropython,PappaPeppar/micropython,trezor/micropython,jmarcelino/pycom-micropython,mhoffma/micropython,pramasoul/micropython,ryannathans/micropython,bvernoux/micropython,TDAbboud/micropython,PappaPeppar/micropython,HenrikSolver/micropython,ryannathans/micropython,swegener/micropython,blazewicz/micropython,kerneltask/micropython,micropython/micropython-esp32,alex-robbins/micropython,swegener/micropython,turbinenreiter/micropython,hiway/micropython,tralamazza/micropython,tralamazza/micropython,hiway/micropython,toolmacher/micropython,misterdanb/micropython,mpalomer/micropython,trezor/micropython,hosaka/micropython,henriknelson/micropython,misterdanb/micropython,bvernoux/micropython,puuu/micropython,pramasoul/micropython,infinnovation/micropython,ganshun666/micropython,MrSurly/micropython-esp32,Timmenem/micropython,swegener/micropython,dmazzella/micropython,alex-march/micropython,turbinenreiter/micropython,swegener/micropython,Peetz0r/micropython-esp32,infinnovation/micropython,adafruit/circuitpython,mpalomer/micropython,AriZuu/micropython,pramasoul/micropython,drrk/micropython,pozetroninc/micropython,kerneltask/micropython
|
# test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
tests: Disable memoryview tests that overflow int conversion.
They fail on builds with 32-bit word size.
|
# test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
# these constructors give an internal overflow in uPy
#print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
#print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
|
<commit_before># test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
<commit_msg>tests: Disable memoryview tests that overflow int conversion.
They fail on builds with 32-bit word size.<commit_after>
|
# test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
# these constructors give an internal overflow in uPy
#print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
#print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
|
# test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
tests: Disable memoryview tests that overflow int conversion.
They fail on builds with 32-bit word size.# test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
# these constructors give an internal overflow in uPy
#print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
#print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
|
<commit_before># test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
<commit_msg>tests: Disable memoryview tests that overflow int conversion.
They fail on builds with 32-bit word size.<commit_after># test memoryview accessing maximum values for signed/unsigned elements
from array import array
print(list(memoryview(b'\x7f\x80\x81\xff')))
print(list(memoryview(array('b', [0x7f, -0x80]))))
print(list(memoryview(array('B', [0x7f, 0x80, 0x81, 0xff]))))
print(list(memoryview(array('h', [0x7f00, -0x8000]))))
print(list(memoryview(array('H', [0x7f00, 0x8000, 0x8100, 0xffff]))))
# these constructors give an internal overflow in uPy
#print(list(memoryview(array('i', [0x7f000000, -0x80000000]))))
#print(list(memoryview(array('I', [0x7f000000, 0x80000000, 0x81000000, 0xffffffff]))))
|
729f02949842d4d5a5722a2b9b35c204748c00f7
|
turbosms/lib.py
|
turbosms/lib.py
|
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message):
if IS_SMS_ENABLED:
for number in get_recipients():
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
|
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_default_sms_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message, recipients=None):
if not IS_SMS_ENABLED:
return
if recipients is None:
recipients = get_default_sms_recipients()
for number in recipients:
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
|
Add recipients param to send_sms method.
|
Add recipients param to send_sms method.
|
Python
|
isc
|
pmaigutyak/mp-turbosms
|
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message):
if IS_SMS_ENABLED:
for number in get_recipients():
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
Add recipients param to send_sms method.
|
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_default_sms_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message, recipients=None):
if not IS_SMS_ENABLED:
return
if recipients is None:
recipients = get_default_sms_recipients()
for number in recipients:
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
|
<commit_before>
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message):
if IS_SMS_ENABLED:
for number in get_recipients():
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
<commit_msg>Add recipients param to send_sms method.<commit_after>
|
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_default_sms_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message, recipients=None):
if not IS_SMS_ENABLED:
return
if recipients is None:
recipients = get_default_sms_recipients()
for number in recipients:
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
|
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message):
if IS_SMS_ENABLED:
for number in get_recipients():
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
Add recipients param to send_sms method.
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_default_sms_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message, recipients=None):
if not IS_SMS_ENABLED:
return
if recipients is None:
recipients = get_default_sms_recipients()
for number in recipients:
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
|
<commit_before>
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message):
if IS_SMS_ENABLED:
for number in get_recipients():
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
<commit_msg>Add recipients param to send_sms method.<commit_after>
from django.apps import apps
from django.template.loader import render_to_string
from turbosms.settings import IS_SMS_ENABLED, SMS_RECIPIENTS
from turbosms.models import SMS
def get_default_sms_recipients():
if apps.is_installed('site_config'):
from site_config import config
if hasattr(config, 'SMS_RECIPIENTS'):
return config.SMS_RECIPIENTS
return SMS_RECIPIENTS
def send_sms(message, recipients=None):
if not IS_SMS_ENABLED:
return
if recipients is None:
recipients = get_default_sms_recipients()
for number in recipients:
SMS.objects.create(number=number, message=message)
def send_sms_from_template(template_name, context=None):
message = render_to_string(template_name, context)
send_sms(message)
|
e22d62d84e9fe518a04cb0af89c589be3c3f01a2
|
app/env_settings_example.py
|
app/env_settings_example.py
|
import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\xb9\x8d\xb5\xc2\xc4Q\xe7\x8ej\xe0\x05\xf3\xa3kp\x99l\xe7\xf2i\x00\xb1-\xcd'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = 'email@example.com'
MAIL_PASSWORD = 'password'
MAIL_DEFAULT_SENDER = '"AppName" <noreply@example.com>'
MAIL_SERVER = 'MAIL_SERVER', 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
|
import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\x9d|*\xbb\x82T\x83\xeb\xf52\xd1\xdfl\x87\xb4\x9e\x10f\xdf\x9e\xea\xf8_\x99'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = ''
MAIL_PASSWORD = ''
MAIL_DEFAULT_SENDER = ''
MAIL_SERVER = 'localhost'
MAIL_PORT = 25
MAIL_USE_SSL = False
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
|
Set environment settings to suit localhost
|
Set environment settings to suit localhost
|
Python
|
bsd-2-clause
|
UCL-CS35/incdb-user,UCL-CS35/incdb-user,UCL-CS35/incdb-user
|
import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\xb9\x8d\xb5\xc2\xc4Q\xe7\x8ej\xe0\x05\xf3\xa3kp\x99l\xe7\xf2i\x00\xb1-\xcd'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = 'email@example.com'
MAIL_PASSWORD = 'password'
MAIL_DEFAULT_SENDER = '"AppName" <noreply@example.com>'
MAIL_SERVER = 'MAIL_SERVER', 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
Set environment settings to suit localhost
|
import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\x9d|*\xbb\x82T\x83\xeb\xf52\xd1\xdfl\x87\xb4\x9e\x10f\xdf\x9e\xea\xf8_\x99'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = ''
MAIL_PASSWORD = ''
MAIL_DEFAULT_SENDER = ''
MAIL_SERVER = 'localhost'
MAIL_PORT = 25
MAIL_USE_SSL = False
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
|
<commit_before>import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\xb9\x8d\xb5\xc2\xc4Q\xe7\x8ej\xe0\x05\xf3\xa3kp\x99l\xe7\xf2i\x00\xb1-\xcd'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = 'email@example.com'
MAIL_PASSWORD = 'password'
MAIL_DEFAULT_SENDER = '"AppName" <noreply@example.com>'
MAIL_SERVER = 'MAIL_SERVER', 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
<commit_msg>Set environment settings to suit localhost<commit_after>
|
import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\x9d|*\xbb\x82T\x83\xeb\xf52\xd1\xdfl\x87\xb4\x9e\x10f\xdf\x9e\xea\xf8_\x99'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = ''
MAIL_PASSWORD = ''
MAIL_DEFAULT_SENDER = ''
MAIL_SERVER = 'localhost'
MAIL_PORT = 25
MAIL_USE_SSL = False
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
|
import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\xb9\x8d\xb5\xc2\xc4Q\xe7\x8ej\xe0\x05\xf3\xa3kp\x99l\xe7\xf2i\x00\xb1-\xcd'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = 'email@example.com'
MAIL_PASSWORD = 'password'
MAIL_DEFAULT_SENDER = '"AppName" <noreply@example.com>'
MAIL_SERVER = 'MAIL_SERVER', 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
Set environment settings to suit localhostimport os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\x9d|*\xbb\x82T\x83\xeb\xf52\xd1\xdfl\x87\xb4\x9e\x10f\xdf\x9e\xea\xf8_\x99'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = ''
MAIL_PASSWORD = ''
MAIL_DEFAULT_SENDER = ''
MAIL_SERVER = 'localhost'
MAIL_PORT = 25
MAIL_USE_SSL = False
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
|
<commit_before>import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\xb9\x8d\xb5\xc2\xc4Q\xe7\x8ej\xe0\x05\xf3\xa3kp\x99l\xe7\xf2i\x00\xb1-\xcd'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = 'email@example.com'
MAIL_PASSWORD = 'password'
MAIL_DEFAULT_SENDER = '"AppName" <noreply@example.com>'
MAIL_SERVER = 'MAIL_SERVER', 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
<commit_msg>Set environment settings to suit localhost<commit_after>import os
# *****************************
# Environment specific settings
# *****************************
# The settings below can (and should) be over-ruled by OS environment variable settings
# Flask settings # Generated with: import os; os.urandom(24)
SECRET_KEY = '\x9d|*\xbb\x82T\x83\xeb\xf52\xd1\xdfl\x87\xb4\x9e\x10f\xdf\x9e\xea\xf8_\x99'
# PLEASE USE A DIFFERENT KEY FOR PRODUCTION ENVIRONMENTS!
# SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../app.sqlite'
# Flask-Mail settings
MAIL_USERNAME = ''
MAIL_PASSWORD = ''
MAIL_DEFAULT_SENDER = ''
MAIL_SERVER = 'localhost'
MAIL_PORT = 25
MAIL_USE_SSL = False
MAIL_USE_TLS = False
ADMINS = [
'"Admin One" <admin1@gmail.com>',
]
|
3bdc7250f7a40ef4b3ad5f431c6b6e3e92ccacc8
|
app.py
|
app.py
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
# @app.route('/')
# def main():
# return redirect('/index')
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
@app.route('/')
def main():
return redirect('/index')
@app.route('/index', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
Revert "Remove redirect to avoid Chrome privacy error"
|
Revert "Remove redirect to avoid Chrome privacy error"
This reverts commit e5322958f14b2428b74de726476fd98adae8c454.
|
Python
|
mit
|
gsganden/pitcher-reports,gsganden/pitcher-reports
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
# @app.route('/')
# def main():
# return redirect('/index')
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)Revert "Remove redirect to avoid Chrome privacy error"
This reverts commit e5322958f14b2428b74de726476fd98adae8c454.
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
@app.route('/')
def main():
return redirect('/index')
@app.route('/index', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
<commit_before>from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
# @app.route('/')
# def main():
# return redirect('/index')
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)<commit_msg>Revert "Remove redirect to avoid Chrome privacy error"
This reverts commit e5322958f14b2428b74de726476fd98adae8c454.<commit_after>
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
@app.route('/')
def main():
return redirect('/index')
@app.route('/index', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
# @app.route('/')
# def main():
# return redirect('/index')
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)Revert "Remove redirect to avoid Chrome privacy error"
This reverts commit e5322958f14b2428b74de726476fd98adae8c454.from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
@app.route('/')
def main():
return redirect('/index')
@app.route('/index', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
<commit_before>from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
# @app.route('/')
# def main():
# return redirect('/index')
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)<commit_msg>Revert "Remove redirect to avoid Chrome privacy error"
This reverts commit e5322958f14b2428b74de726476fd98adae8c454.<commit_after>from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save
app = Flask(__name__)
@app.route('/')
def main():
return redirect('/index')
@app.route('/index', methods=['GET', 'POST'])
def index():
if request.method == 'GET':
return render_template('index.html')
else:
pitcher = request.form['pitcher']
image_file = pitcher.lower()
image_file = image_file.split()
image_file = '_'.join(image_file) + '.png'
return render_template('results.html', image_file = image_file)
if __name__ == '__main__':
app.run(port=33508)
|
ee164dbd27fea029d077db8a1774b7d0ff0ebc91
|
app.py
|
app.py
|
"""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def root():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
|
"""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def status():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
|
Rename method for status checks
|
Rename method for status checks
The method was historically named root, since it serves the root
path of the application. But in light of current developments and
its use as a status check, it seems more appropriate to rename it
to status.
|
Python
|
mit
|
Automatiqa/data-entry-clerk,Automatiqa/data-entry-clerk
|
"""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def root():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
Rename method for status checks
The method was historically named root, since it serves the root
path of the application. But in light of current developments and
its use as a status check, it seems more appropriate to rename it
to status.
|
"""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def status():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
|
<commit_before>"""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def root():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
<commit_msg>Rename method for status checks
The method was historically named root, since it serves the root
path of the application. But in light of current developments and
its use as a status check, it seems more appropriate to rename it
to status.<commit_after>
|
"""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def status():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
|
"""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def root():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
Rename method for status checks
The method was historically named root, since it serves the root
path of the application. But in light of current developments and
its use as a status check, it seems more appropriate to rename it
to status."""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def status():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
|
<commit_before>"""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def root():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
<commit_msg>Rename method for status checks
The method was historically named root, since it serves the root
path of the application. But in light of current developments and
its use as a status check, it seems more appropriate to rename it
to status.<commit_after>"""
Data Entry Clerk.
The Data Entry Clerk (DEC) accepts data on your behalf and stores it in a
database for later processing. The data is delivered to the DEC by external
services via webhooks, for which it implements an HTTP endpoint.
"""
import os
from flask import Flask, jsonify
app = Flask(__name__)
# Load the default configuration
app.config.from_object('config.default')
# Load environment specific configuration
app.config.from_object(
f"config.{os.getenv('FLASK_ENV', 'development').lower()}")
@app.route("/")
def status():
"""
Endpoint for status checks.
Depending on how and where the app is deployed, it might require an
endpoint for status checks. The root path returns HTTP 200 and can be used
for this purpose.
:return: The status encoded as a JSON object and HTTP 200
"""
return jsonify({"status": "ok"}), 200
|
2d6a5a494b42519a1ec849e1fa508f93653e5d33
|
rango/forms.py
|
rango/forms.py
|
from django import forms
from rango.models import Category, Page
class CategoryForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Please enter a category name")
views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
# tuple specifying the classes we want to use
fields = ('name', )
class PageForm(forms.ModelForm):
title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
model = Page
# excluding the category foreign key field from the form
exclude = ('category', 'slug')
|
from django import forms
from django.contrib.auth.models import User
from rango.models import Category, Page, UserProfile
class CategoryForm(forms.ModelForm):
    """Form for creating a Category.

    Only ``name`` is user-visible; ``views``/``likes`` are hidden counters
    initialised to 0, and ``slug`` is hidden and filled in elsewhere
    (it is not required from the user).
    """
    name = forms.CharField(max_length=128, help_text="Please enter a category name")
    views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
    likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
    slug = forms.CharField(widget=forms.HiddenInput(), required=False)
    class Meta:
        model = Category
        # tuple specifying the classes we want to use
        fields = ('name', )
class PageForm(forms.ModelForm):
    """Form for creating a Page; ``views`` is a hidden counter starting at 0."""
    title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
    url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
    views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
    class Meta:
        model = Page
        # excluding the category foreign key field from the form
        exclude = ('category', 'slug')
class UserForm(forms.ModelForm):
    """Registration form for Django's built-in User.

    Overrides ``password`` so it renders with a password input widget.
    NOTE(review): the raw password is a plain model field here — presumably
    the view hashes it (e.g. via ``set_password``) before saving; confirm.
    """
    password = forms.CharField(widget=forms.PasswordInput(), help_text="Enter your password")
    class Meta:
        model = User
        fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
    """Form for the extra per-user fields stored on UserProfile."""
    class Meta:
        model = UserProfile
        fields = ('website', 'picture')
|
Add form classes for User and UserProfile
|
Add form classes for User and UserProfile
|
Python
|
mit
|
dnestoff/Tango-With-Django,dnestoff/Tango-With-Django
|
from django import forms
from rango.models import Category, Page
class CategoryForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Please enter a category name")
views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
# tuple specifying the classes we want to use
fields = ('name', )
class PageForm(forms.ModelForm):
title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
model = Page
# excluding the category foreign key field from the form
exclude = ('category', 'slug')
Add form classes for User and UserProfile
|
from django import forms
from django.contrib.auth.models import User
from rango.models import Category, Page, UserProfile
class CategoryForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Please enter a category name")
views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
# tuple specifying the classes we want to use
fields = ('name', )
class PageForm(forms.ModelForm):
title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
model = Page
# excluding the category foreign key field from the form
exclude = ('category', 'slug')
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput(), help_text="Enter your password")
class Meta:
model = User
fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ('website', 'picture')
|
<commit_before>from django import forms
from rango.models import Category, Page
class CategoryForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Please enter a category name")
views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
# tuple specifying the classes we want to use
fields = ('name', )
class PageForm(forms.ModelForm):
title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
model = Page
# excluding the category foreign key field from the form
exclude = ('category', 'slug')
<commit_msg>Add form classes for User and UserProfile<commit_after>
|
from django import forms
from django.contrib.auth.models import User
from rango.models import Category, Page, UserProfile
class CategoryForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Please enter a category name")
views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
# tuple specifying the classes we want to use
fields = ('name', )
class PageForm(forms.ModelForm):
title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
model = Page
# excluding the category foreign key field from the form
exclude = ('category', 'slug')
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput(), help_text="Enter your password")
class Meta:
model = User
fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ('website', 'picture')
|
from django import forms
from rango.models import Category, Page
class CategoryForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Please enter a category name")
views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
# tuple specifying the classes we want to use
fields = ('name', )
class PageForm(forms.ModelForm):
title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
model = Page
# excluding the category foreign key field from the form
exclude = ('category', 'slug')
Add form classes for User and UserProfilefrom django import forms
from django.contrib.auth.models import User
from rango.models import Category, Page, UserProfile
class CategoryForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Please enter a category name")
views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
# tuple specifying the classes we want to use
fields = ('name', )
class PageForm(forms.ModelForm):
title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
model = Page
# excluding the category foreign key field from the form
exclude = ('category', 'slug')
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput(), help_text="Enter your password")
class Meta:
model = User
fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ('website', 'picture')
|
<commit_before>from django import forms
from rango.models import Category, Page
class CategoryForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Please enter a category name")
views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
# tuple specifying the classes we want to use
fields = ('name', )
class PageForm(forms.ModelForm):
title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
model = Page
# excluding the category foreign key field from the form
exclude = ('category', 'slug')
<commit_msg>Add form classes for User and UserProfile<commit_after>from django import forms
from django.contrib.auth.models import User
from rango.models import Category, Page, UserProfile
class CategoryForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Please enter a category name")
views = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial = 0)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
# tuple specifying the classes we want to use
fields = ('name', )
class PageForm(forms.ModelForm):
title = forms.CharField(max_length=128, help_text="Please enter the title of the page.")
url = forms.URLField(max_length=200, help_text="Please enter the URL of the page.")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
model = Page
# excluding the category foreign key field from the form
exclude = ('category', 'slug')
class UserForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput(), help_text="Enter your password")
class Meta:
model = User
fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ('website', 'picture')
|
36623cddfd41e3ff7a19e83f0235300a2dfd83f8
|
zerver/webhooks/dialogflow/view.py
|
zerver/webhooks/dialogflow/view.py
|
# Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
                           payload: Dict[str, Any]=REQ(argument_type='body'),
                           email: str=REQ(default='foo')) -> HttpResponse:
    """Deliver a DialogFlow fulfillment result as a private message.

    On a 200 status, prefer the primary fulfillment speech, then the
    alternate result, then a fixed fallback; otherwise report the error
    code and details.

    NOTE(review): ``default='foo'`` masks a missing ``email`` parameter —
    the failure then surfaces as a confusing user lookup error instead of a
    missing-parameter error; consider making it required via ``REQ()``.
    """
    status = payload["status"]["code"]
    if status == 200:
        result = payload["result"]["fulfillment"]["speech"]
        if not result:
            # Only consult "alternateResult" when the primary speech is empty.
            alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
            if not alternate_result:
                body = "DialogFlow couldn't process your query."
            else:
                body = alternate_result
        else:
            body = result
    else:
        error_status = payload["status"]["errorDetails"]
        body = "{} - {}".format(status, error_status)
    profile = get_user_profile_by_email(email)
    check_send_private_message(user_profile, request.client, profile, body)
    return json_success()
|
# Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ()) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
|
Remove default value for email parameter.
|
webhooks/dialogflow: Remove default value for email parameter.
The webhook view used a default value for the email, which gave
non-informative errors when the webhook is incorrectly configured without
the email parameter.
|
Python
|
apache-2.0
|
shubhamdhama/zulip,showell/zulip,showell/zulip,hackerkid/zulip,synicalsyntax/zulip,zulip/zulip,punchagan/zulip,andersk/zulip,andersk/zulip,shubhamdhama/zulip,rht/zulip,timabbott/zulip,rht/zulip,shubhamdhama/zulip,zulip/zulip,andersk/zulip,rht/zulip,synicalsyntax/zulip,punchagan/zulip,hackerkid/zulip,andersk/zulip,punchagan/zulip,timabbott/zulip,kou/zulip,zulip/zulip,eeshangarg/zulip,brainwane/zulip,shubhamdhama/zulip,zulip/zulip,synicalsyntax/zulip,hackerkid/zulip,showell/zulip,timabbott/zulip,kou/zulip,showell/zulip,brainwane/zulip,showell/zulip,shubhamdhama/zulip,synicalsyntax/zulip,kou/zulip,eeshangarg/zulip,punchagan/zulip,rht/zulip,brainwane/zulip,timabbott/zulip,andersk/zulip,shubhamdhama/zulip,eeshangarg/zulip,kou/zulip,synicalsyntax/zulip,showell/zulip,andersk/zulip,eeshangarg/zulip,hackerkid/zulip,eeshangarg/zulip,timabbott/zulip,hackerkid/zulip,hackerkid/zulip,zulip/zulip,kou/zulip,zulip/zulip,punchagan/zulip,synicalsyntax/zulip,shubhamdhama/zulip,hackerkid/zulip,brainwane/zulip,brainwane/zulip,kou/zulip,synicalsyntax/zulip,eeshangarg/zulip,rht/zulip,rht/zulip,showell/zulip,brainwane/zulip,andersk/zulip,eeshangarg/zulip,timabbott/zulip,timabbott/zulip,zulip/zulip,kou/zulip,rht/zulip,brainwane/zulip,punchagan/zulip,punchagan/zulip
|
# Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ(default='foo')) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
webhooks/dialogflow: Remove default value for email parameter.
The webhook view used a default value for the email, which gave
non-informative errors when the webhook is incorrectly configured without
the email parameter.
|
# Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
                           payload: Dict[str, Any]=REQ(argument_type='body'),
                           email: str=REQ()) -> HttpResponse:
    """Deliver a DialogFlow fulfillment result to *email* as a private message."""
    status = payload["status"]["code"]
    if status == 200:
        # Prefer the primary fulfillment speech, then the alternate result,
        # then a fixed fallback. `or` short-circuits, so "alternateResult"
        # is only looked up when the primary speech is empty.
        body = (payload["result"]["fulfillment"]["speech"]
                or payload["alternateResult"]["fulfillment"]["speech"]
                or "DialogFlow couldn't process your query.")
    else:
        body = "{} - {}".format(status, payload["status"]["errorDetails"])
    recipient = get_user_profile_by_email(email)
    check_send_private_message(user_profile, request.client, recipient, body)
    return json_success()
|
<commit_before># Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ(default='foo')) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
<commit_msg>webhooks/dialogflow: Remove default value for email parameter.
The webhook view used a default value for the email, which gave
non-informative errors when the webhook is incorrectly configured without
the email parameter.<commit_after>
|
# Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ()) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
|
# Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ(default='foo')) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
webhooks/dialogflow: Remove default value for email parameter.
The webhook view used a default value for the email, which gave
non-informative errors when the webhook is incorrectly configured without
the email parameter.# Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ()) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
|
<commit_before># Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ(default='foo')) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
<commit_msg>webhooks/dialogflow: Remove default value for email parameter.
The webhook view used a default value for the email, which gave
non-informative errors when the webhook is incorrectly configured without
the email parameter.<commit_after># Webhooks for external integrations.
from typing import Any, Dict
from django.http import HttpRequest, HttpResponse
from zerver.decorator import api_key_only_webhook_view
from zerver.lib.actions import check_send_private_message
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.models import UserProfile, get_user_profile_by_email
@api_key_only_webhook_view("dialogflow")
@has_request_variables
def api_dialogflow_webhook(request: HttpRequest, user_profile: UserProfile,
payload: Dict[str, Any]=REQ(argument_type='body'),
email: str=REQ()) -> HttpResponse:
status = payload["status"]["code"]
if status == 200:
result = payload["result"]["fulfillment"]["speech"]
if not result:
alternate_result = payload["alternateResult"]["fulfillment"]["speech"]
if not alternate_result:
body = "DialogFlow couldn't process your query."
else:
body = alternate_result
else:
body = result
else:
error_status = payload["status"]["errorDetails"]
body = "{} - {}".format(status, error_status)
profile = get_user_profile_by_email(email)
check_send_private_message(user_profile, request.client, profile, body)
return json_success()
|
ee65c8783db3c305914d19e6a39e4d02fdc4a1f2
|
lms/djangoapps/certificates/signals.py
|
lms/djangoapps/certificates/signals.py
|
""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import SignalHandler, modulestore
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs):  # pylint: disable=unused-argument
    """ Catches the signal that a course has been published in Studio and
    enable the self-generated certificates by default for self-paced
    courses.
    """
    # Hand the work to Celery; serialize the CourseKey as a string since the
    # key object itself isn't task-argument friendly. (`unicode` here
    # indicates this module targets Python 2.)
    enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
    """Enable the self-generated certificates by default for self-paced courses.

    :param course_key: course key serialized as a string (see the publish
        signal handler above, which stringifies it for the Celery hand-off).
    """
    # Rebuild the CourseKey from its string form.
    course_key = CourseKey.from_string(course_key)
    course = modulestore().get_course(course_key)
    is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
    # Only flip the setting for self-paced courses that don't already have it,
    # so an existing explicit setting is not rewritten on every publish.
    if course.self_paced and not is_enabled_for_course:
        CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
|
""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from xmodule.modulestore.django import SignalHandler
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=unused-argument
""" Catches the signal that a course has been published in Studio and
enable the self-generated certificates by default for self-paced
courses.
"""
enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
"""Enable the self-generated certificates by default for self-paced courses."""
course_key = CourseKey.from_string(course_key)
course = CourseOverview.get_from_id(course_key)
is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
if course.self_paced and not is_enabled_for_course:
CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
|
Use CourseOverview instead of modulestore.
|
Use CourseOverview instead of modulestore.
|
Python
|
agpl-3.0
|
jolyonb/edx-platform,deepsrijit1105/edx-platform,synergeticsedx/deployment-wipro,romain-li/edx-platform,amir-qayyum-khan/edx-platform,gymnasium/edx-platform,shabab12/edx-platform,longmen21/edx-platform,synergeticsedx/deployment-wipro,marcore/edx-platform,gsehub/edx-platform,waheedahmed/edx-platform,TeachAtTUM/edx-platform,chrisndodge/edx-platform,JioEducation/edx-platform,a-parhom/edx-platform,Edraak/edraak-platform,a-parhom/edx-platform,CredoReference/edx-platform,ampax/edx-platform,pepeportela/edx-platform,kmoocdev2/edx-platform,ahmedaljazzar/edx-platform,caesar2164/edx-platform,naresh21/synergetics-edx-platform,miptliot/edx-platform,deepsrijit1105/edx-platform,eduNEXT/edunext-platform,CredoReference/edx-platform,Lektorium-LLC/edx-platform,chrisndodge/edx-platform,pabloborrego93/edx-platform,marcore/edx-platform,Stanford-Online/edx-platform,miptliot/edx-platform,eduNEXT/edunext-platform,prarthitm/edxplatform,philanthropy-u/edx-platform,mitocw/edx-platform,gsehub/edx-platform,Lektorium-LLC/edx-platform,louyihua/edx-platform,edx-solutions/edx-platform,msegado/edx-platform,a-parhom/edx-platform,angelapper/edx-platform,fintech-circle/edx-platform,longmen21/edx-platform,appsembler/edx-platform,jzoldak/edx-platform,jjmiranda/edx-platform,longmen21/edx-platform,BehavioralInsightsTeam/edx-platform,philanthropy-u/edx-platform,deepsrijit1105/edx-platform,kmoocdev2/edx-platform,pabloborrego93/edx-platform,prarthitm/edxplatform,jjmiranda/edx-platform,Livit/Livit.Learn.EdX,raccoongang/edx-platform,msegado/edx-platform,naresh21/synergetics-edx-platform,proversity-org/edx-platform,eduNEXT/edx-platform,Livit/Livit.Learn.EdX,jolyonb/edx-platform,mbareta/edx-platform-ft,hastexo/edx-platform,hastexo/edx-platform,chrisndodge/edx-platform,angelapper/edx-platform,Edraak/edraak-platform,louyihua/edx-platform,tanmaykm/edx-platform,hastexo/edx-platform,mitocw/edx-platform,ampax/edx-platform,appsembler/edx-platform,naresh21/synergetics-edx-platform,caesar2164/edx-platform,ESOedX/edx-platfo
rm,cecep-edu/edx-platform,romain-li/edx-platform,ESOedX/edx-platform,edx-solutions/edx-platform,proversity-org/edx-platform,synergeticsedx/deployment-wipro,naresh21/synergetics-edx-platform,EDUlib/edx-platform,Stanford-Online/edx-platform,pabloborrego93/edx-platform,eduNEXT/edunext-platform,BehavioralInsightsTeam/edx-platform,eduNEXT/edx-platform,caesar2164/edx-platform,jjmiranda/edx-platform,cpennington/edx-platform,philanthropy-u/edx-platform,pabloborrego93/edx-platform,ahmedaljazzar/edx-platform,stvstnfrd/edx-platform,louyihua/edx-platform,tanmaykm/edx-platform,fintech-circle/edx-platform,Edraak/edraak-platform,itsjeyd/edx-platform,itsjeyd/edx-platform,gymnasium/edx-platform,marcore/edx-platform,edx/edx-platform,jolyonb/edx-platform,arbrandes/edx-platform,cecep-edu/edx-platform,waheedahmed/edx-platform,Stanford-Online/edx-platform,cecep-edu/edx-platform,cecep-edu/edx-platform,jolyonb/edx-platform,appsembler/edx-platform,waheedahmed/edx-platform,synergeticsedx/deployment-wipro,TeachAtTUM/edx-platform,BehavioralInsightsTeam/edx-platform,EDUlib/edx-platform,gsehub/edx-platform,ESOedX/edx-platform,jzoldak/edx-platform,amir-qayyum-khan/edx-platform,procangroup/edx-platform,mbareta/edx-platform-ft,BehavioralInsightsTeam/edx-platform,msegado/edx-platform,angelapper/edx-platform,kmoocdev2/edx-platform,philanthropy-u/edx-platform,lduarte1991/edx-platform,arbrandes/edx-platform,proversity-org/edx-platform,pepeportela/edx-platform,angelapper/edx-platform,pepeportela/edx-platform,mbareta/edx-platform-ft,TeachAtTUM/edx-platform,edx/edx-platform,Livit/Livit.Learn.EdX,louyihua/edx-platform,Livit/Livit.Learn.EdX,longmen21/edx-platform,Stanford-Online/edx-platform,itsjeyd/edx-platform,romain-li/edx-platform,eduNEXT/edunext-platform,marcore/edx-platform,ampax/edx-platform,edx-solutions/edx-platform,appsembler/edx-platform,teltek/edx-platform,tanmaykm/edx-platform,miptliot/edx-platform,edx/edx-platform,tanmaykm/edx-platform,gsehub/edx-platform,stvstnfrd/edx-platform,teltek/edx-plat
form,ampax/edx-platform,ESOedX/edx-platform,romain-li/edx-platform,lduarte1991/edx-platform,prarthitm/edxplatform,fintech-circle/edx-platform,longmen21/edx-platform,jjmiranda/edx-platform,procangroup/edx-platform,waheedahmed/edx-platform,EDUlib/edx-platform,CredoReference/edx-platform,arbrandes/edx-platform,deepsrijit1105/edx-platform,ahmedaljazzar/edx-platform,proversity-org/edx-platform,teltek/edx-platform,pepeportela/edx-platform,a-parhom/edx-platform,teltek/edx-platform,shabab12/edx-platform,stvstnfrd/edx-platform,shabab12/edx-platform,raccoongang/edx-platform,arbrandes/edx-platform,JioEducation/edx-platform,procangroup/edx-platform,cpennington/edx-platform,cpennington/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,amir-qayyum-khan/edx-platform,lduarte1991/edx-platform,chrisndodge/edx-platform,JioEducation/edx-platform,prarthitm/edxplatform,Edraak/edraak-platform,jzoldak/edx-platform,edx-solutions/edx-platform,jzoldak/edx-platform,romain-li/edx-platform,lduarte1991/edx-platform,procangroup/edx-platform,raccoongang/edx-platform,gymnasium/edx-platform,eduNEXT/edx-platform,cecep-edu/edx-platform,gymnasium/edx-platform,CredoReference/edx-platform,msegado/edx-platform,hastexo/edx-platform,amir-qayyum-khan/edx-platform,kmoocdev2/edx-platform,miptliot/edx-platform,mitocw/edx-platform,msegado/edx-platform,kmoocdev2/edx-platform,mitocw/edx-platform,fintech-circle/edx-platform,itsjeyd/edx-platform,raccoongang/edx-platform,caesar2164/edx-platform,edx/edx-platform,Lektorium-LLC/edx-platform,TeachAtTUM/edx-platform,stvstnfrd/edx-platform,shabab12/edx-platform,mbareta/edx-platform-ft,Lektorium-LLC/edx-platform,waheedahmed/edx-platform,JioEducation/edx-platform,cpennington/edx-platform,ahmedaljazzar/edx-platform
|
""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import SignalHandler, modulestore
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=unused-argument
""" Catches the signal that a course has been published in Studio and
enable the self-generated certificates by default for self-paced
courses.
"""
enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
"""Enable the self-generated certificates by default for self-paced courses."""
course_key = CourseKey.from_string(course_key)
course = modulestore().get_course(course_key)
is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
if course.self_paced and not is_enabled_for_course:
CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
Use CourseOverview instead of modulestore.
|
""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from xmodule.modulestore.django import SignalHandler
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=unused-argument
""" Catches the signal that a course has been published in Studio and
enable the self-generated certificates by default for self-paced
courses.
"""
enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
"""Enable the self-generated certificates by default for self-paced courses."""
course_key = CourseKey.from_string(course_key)
course = CourseOverview.get_from_id(course_key)
is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
if course.self_paced and not is_enabled_for_course:
CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
|
<commit_before>""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import SignalHandler, modulestore
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=unused-argument
""" Catches the signal that a course has been published in Studio and
enable the self-generated certificates by default for self-paced
courses.
"""
enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
"""Enable the self-generated certificates by default for self-paced courses."""
course_key = CourseKey.from_string(course_key)
course = modulestore().get_course(course_key)
is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
if course.self_paced and not is_enabled_for_course:
CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
<commit_msg>Use CourseOverview instead of modulestore.<commit_after>
|
""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from xmodule.modulestore.django import SignalHandler
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=unused-argument
""" Catches the signal that a course has been published in Studio and
enable the self-generated certificates by default for self-paced
courses.
"""
enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
"""Enable the self-generated certificates by default for self-paced courses."""
course_key = CourseKey.from_string(course_key)
course = CourseOverview.get_from_id(course_key)
is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
if course.self_paced and not is_enabled_for_course:
CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
|
""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import SignalHandler, modulestore
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=unused-argument
""" Catches the signal that a course has been published in Studio and
enable the self-generated certificates by default for self-paced
courses.
"""
enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
"""Enable the self-generated certificates by default for self-paced courses."""
course_key = CourseKey.from_string(course_key)
course = modulestore().get_course(course_key)
is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
if course.self_paced and not is_enabled_for_course:
CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
Use CourseOverview instead of modulestore.""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from xmodule.modulestore.django import SignalHandler
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=unused-argument
""" Catches the signal that a course has been published in Studio and
enable the self-generated certificates by default for self-paced
courses.
"""
enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
"""Enable the self-generated certificates by default for self-paced courses."""
course_key = CourseKey.from_string(course_key)
course = CourseOverview.get_from_id(course_key)
is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
if course.self_paced and not is_enabled_for_course:
CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
|
<commit_before>""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import SignalHandler, modulestore
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=unused-argument
""" Catches the signal that a course has been published in Studio and
enable the self-generated certificates by default for self-paced
courses.
"""
enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
"""Enable the self-generated certificates by default for self-paced courses."""
course_key = CourseKey.from_string(course_key)
course = modulestore().get_course(course_key)
is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
if course.self_paced and not is_enabled_for_course:
CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
<commit_msg>Use CourseOverview instead of modulestore.<commit_after>""" Signal handler for enabling self-generated certificates by default
for self-paced courses.
"""
from celery.task import task
from django.dispatch.dispatcher import receiver
from certificates.models import CertificateGenerationCourseSetting
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from xmodule.modulestore.django import SignalHandler
@receiver(SignalHandler.course_published)
def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=unused-argument
""" Catches the signal that a course has been published in Studio and
enable the self-generated certificates by default for self-paced
courses.
"""
enable_self_generated_certs.delay(unicode(course_key))
@task()
def enable_self_generated_certs(course_key):
"""Enable the self-generated certificates by default for self-paced courses."""
course_key = CourseKey.from_string(course_key)
course = CourseOverview.get_from_id(course_key)
is_enabled_for_course = CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
if course.self_paced and not is_enabled_for_course:
CertificateGenerationCourseSetting.set_enabled_for_course(course_key, True)
|
f0458b71981056660d811c5ed206c465cd450620
|
ddt.py
|
ddt.py
|
from functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.iteritems():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
|
from functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
|
Change iteritems to items, as it was breaking tests
|
Change iteritems to items, as it was breaking tests
|
Python
|
mit
|
domidimi/ddt,datadriventests/ddt,edx/ddt,datadriventests/ddt,edx/ddt,domidimi/ddt
|
from functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.iteritems():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
Change iteritems to items, as it was breaking tests
|
from functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
|
<commit_before>from functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.iteritems():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
<commit_msg>Change iteritems to items, as it was breaking tests<commit_after>
|
from functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
|
from functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.iteritems():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
Change iteritems to items, as it was breaking testsfrom functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
|
<commit_before>from functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.iteritems():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
<commit_msg>Change iteritems to items, as it was breaking tests<commit_after>from functools import wraps
__version__ = '0.2.0'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
The names of the test methods follow the pattern ``test_func_name
+ "_" + str(data)``. If ``data.__name__`` exists, it is used
instead for the test method name.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
for i, v in enumerate(getattr(f, MAGIC)):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
delattr(cls, name)
return cls
|
a89b6ec1bda46c63c0ff0e0a8bb44eb3eda41c1b
|
repo_health/gh_issues/serializers/GhIssueStatsSerializer.py
|
repo_health/gh_issues/serializers/GhIssueStatsSerializer.py
|
"""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
|
"""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
def get_merged_count(self, repo):
return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()
|
Add get merged count method.
|
Add get merged count method.
|
Python
|
mit
|
jakeharding/repo-health,jakeharding/repo-health,jakeharding/repo-health,jakeharding/repo-health
|
"""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
Add get merged count method.
|
"""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
def get_merged_count(self, repo):
return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()
|
<commit_before>"""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
<commit_msg>Add get merged count method.<commit_after>
|
"""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
def get_merged_count(self, repo):
return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()
|
"""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
Add get merged count method."""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
def get_merged_count(self, repo):
return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()
|
<commit_before>"""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
<commit_msg>Add get merged count method.<commit_after>"""
serializers.py - (C) Copyright - 2017
This software is copyrighted to contributors listed in CONTRIBUTIONS.md.
SPDX-License-Identifier: MIT
Author(s) of this file:
J. Harding
Serializer for issue stats of a GitHub repo.
"""
from rest_framework import serializers as s
from ..models import GhIssueEvent
from repo_health.index.mixins import CountForPastYearMixin
class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):
_label_names = None
issues_count = s.SerializerMethodField()
issues_closed_last_year = s.SerializerMethodField()
issues_opened_last_year = s.SerializerMethodField()
merged_count = s.SerializerMethodField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
repo = args[0]
self._label_names = repo.labels.values_list('name', flat=True)
def get_issues_count(self, repo):
return repo.issues_count
def get_issues_closed_last_year(self, repo):
return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())
def get_issues_opened_last_year(self, repo):
return self.get_count_list_for_year(repo.issues)
def get_merged_count(self, repo):
return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()
|
c43a677e19ba1d2603dd4b7907fe053561c4fa06
|
neutron/objects/__init__.py
|
neutron/objects/__init__.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
utils.import_modules_recursively(sys.modules[__name__].__file__)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
dirn = os.path.dirname(sys.modules[__name__].__file__)
utils.import_modules_recursively(dirn)
|
Use dirname in object recursive import
|
Use dirname in object recursive import
__file__ just returns the init file which there was nothing
under.
TrivialFix
Change-Id: I39da8a50c0b9197b7a5cb3d5ca4fd95f8d739eaa
|
Python
|
apache-2.0
|
openstack/neutron,huntxu/neutron,openstack/neutron,eayunstack/neutron,eayunstack/neutron,huntxu/neutron,mahak/neutron,openstack/neutron,mahak/neutron,mahak/neutron,noironetworks/neutron,noironetworks/neutron
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
utils.import_modules_recursively(sys.modules[__name__].__file__)
Use dirname in object recursive import
__file__ just returns the init file which there was nothing
under.
TrivialFix
Change-Id: I39da8a50c0b9197b7a5cb3d5ca4fd95f8d739eaa
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
dirn = os.path.dirname(sys.modules[__name__].__file__)
utils.import_modules_recursively(dirn)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
utils.import_modules_recursively(sys.modules[__name__].__file__)
<commit_msg>Use dirname in object recursive import
__file__ just returns the init file which there was nothing
under.
TrivialFix
Change-Id: I39da8a50c0b9197b7a5cb3d5ca4fd95f8d739eaa<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
dirn = os.path.dirname(sys.modules[__name__].__file__)
utils.import_modules_recursively(dirn)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
utils.import_modules_recursively(sys.modules[__name__].__file__)
Use dirname in object recursive import
__file__ just returns the init file which there was nothing
under.
TrivialFix
Change-Id: I39da8a50c0b9197b7a5cb3d5ca4fd95f8d739eaa# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
dirn = os.path.dirname(sys.modules[__name__].__file__)
utils.import_modules_recursively(dirn)
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
utils.import_modules_recursively(sys.modules[__name__].__file__)
<commit_msg>Use dirname in object recursive import
__file__ just returns the init file which there was nothing
under.
TrivialFix
Change-Id: I39da8a50c0b9197b7a5cb3d5ca4fd95f8d739eaa<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
def register_objects():
# local import to avoid circular import failure
from neutron.common import utils
dirn = os.path.dirname(sys.modules[__name__].__file__)
utils.import_modules_recursively(dirn)
|
5a7f34323ce4db192f588cab503dab4f21bcb0bf
|
youtube_dl_server/server.py
|
youtube_dl_server/server.py
|
from paste import httpserver
import argparse
from .app import app
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
args = parser.parse_args()
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
|
from paste import httpserver
import argparse
from .app import app
from .version import __version__
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
parser.add_argument('--version', action='store_true', help='Print the version of the server')
args = parser.parse_args()
if args.version:
print(__version__)
exit(0)
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
|
Add version option to the command line arguments
|
Add version option to the command line arguments
|
Python
|
unlicense
|
jaimeMF/youtube-dl-api-server,apllicationCOM/youtube-dl-api-server,jaimeMF/youtube-dl-api-server,jaimeMF/youtube-dl-api-server,apllicationCOM/youtube-dl-api-server,apllicationCOM/youtube-dl-api-server
|
from paste import httpserver
import argparse
from .app import app
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
args = parser.parse_args()
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
Add version option to the command line arguments
|
from paste import httpserver
import argparse
from .app import app
from .version import __version__
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
parser.add_argument('--version', action='store_true', help='Print the version of the server')
args = parser.parse_args()
if args.version:
print(__version__)
exit(0)
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
|
<commit_before>from paste import httpserver
import argparse
from .app import app
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
args = parser.parse_args()
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
<commit_msg>Add version option to the command line arguments<commit_after>
|
from paste import httpserver
import argparse
from .app import app
from .version import __version__
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
parser.add_argument('--version', action='store_true', help='Print the version of the server')
args = parser.parse_args()
if args.version:
print(__version__)
exit(0)
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
|
from paste import httpserver
import argparse
from .app import app
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
args = parser.parse_args()
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
Add version option to the command line argumentsfrom paste import httpserver
import argparse
from .app import app
from .version import __version__
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
parser.add_argument('--version', action='store_true', help='Print the version of the server')
args = parser.parse_args()
if args.version:
print(__version__)
exit(0)
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
|
<commit_before>from paste import httpserver
import argparse
from .app import app
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
args = parser.parse_args()
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
<commit_msg>Add version option to the command line arguments<commit_after>from paste import httpserver
import argparse
from .app import app
from .version import __version__
"""
A server for providing the app anywhere, no need for GAE
"""
def main():
desc="""
The youtube-dl API server.
"""
default_port = 9191
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-p','--port',
default= default_port,
type=int,
help='The port the server will use. The default is: {}'.format(default_port)
)
parser.add_argument('--version', action='store_true', help='Print the version of the server')
args = parser.parse_args()
if args.version:
print(__version__)
exit(0)
httpserver.serve(app, host='localhost', port=args.port)
if __name__ == '__main__':
main()
|
02c0b4d62242f9297fc945bb6fd7f86d73492c59
|
azure_cli/container_task.py
|
azure_cli/container_task.py
|
"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
account = StorageAccount(self.account_name, self.config_file)
self.container = Container(account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add('containers', self.container.list())
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add('container_content', self.container.content(
self.command_args['<name>'])
)
Logger.info(result.get())
|
"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
self.account = StorageAccount(self.account_name, self.config_file)
self.container = Container(self.account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add(
'containers:' + self.account.get_name(),
self.container.list()
)
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add(
'container_content:' + self.account.get_name(),
self.container.content(self.command_args['<name>'])
)
Logger.info(result.get())
|
Add used storage account name in container info
|
Add used storage account name in container info
When calling e.g 'container list' you get all container names
but you don't know from which storage account name was used
One would need to look at the config file to check which
storage account name was configured which could be avoided
by just adding this information to the output
|
Python
|
apache-2.0
|
SUSE/azurectl,SUSE/azurectl,SUSE/azurectl
|
"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
account = StorageAccount(self.account_name, self.config_file)
self.container = Container(account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add('containers', self.container.list())
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add('container_content', self.container.content(
self.command_args['<name>'])
)
Logger.info(result.get())
Add used storage account name in container info
When calling e.g 'container list' you get all container names
but you don't know from which storage account name was used
One would need to look at the config file to check which
storage account name was configured which could be avoided
by just adding this information to the output
|
"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
self.account = StorageAccount(self.account_name, self.config_file)
self.container = Container(self.account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add(
'containers:' + self.account.get_name(),
self.container.list()
)
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add(
'container_content:' + self.account.get_name(),
self.container.content(self.command_args['<name>'])
)
Logger.info(result.get())
|
<commit_before>"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
account = StorageAccount(self.account_name, self.config_file)
self.container = Container(account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add('containers', self.container.list())
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add('container_content', self.container.content(
self.command_args['<name>'])
)
Logger.info(result.get())
<commit_msg>Add used storage account name in container info
When calling e.g 'container list' you get all container names
but you don't know from which storage account name was used
One would need to look at the config file to check which
storage account name was configured which could be avoided
by just adding this information to the output<commit_after>
|
"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
self.account = StorageAccount(self.account_name, self.config_file)
self.container = Container(self.account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add(
'containers:' + self.account.get_name(),
self.container.list()
)
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add(
'container_content:' + self.account.get_name(),
self.container.content(self.command_args['<name>'])
)
Logger.info(result.get())
|
"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
account = StorageAccount(self.account_name, self.config_file)
self.container = Container(account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add('containers', self.container.list())
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add('container_content', self.container.content(
self.command_args['<name>'])
)
Logger.info(result.get())
Add used storage account name in container info
When calling e.g 'container list' you get all container names
but you don't know from which storage account name was used
One would need to look at the config file to check which
storage account name was configured which could be avoided
by just adding this information to the output"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
self.account = StorageAccount(self.account_name, self.config_file)
self.container = Container(self.account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add(
'containers:' + self.account.get_name(),
self.container.list()
)
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add(
'container_content:' + self.account.get_name(),
self.container.content(self.command_args['<name>'])
)
Logger.info(result.get())
|
<commit_before>"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
account = StorageAccount(self.account_name, self.config_file)
self.container = Container(account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add('containers', self.container.list())
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add('container_content', self.container.content(
self.command_args['<name>'])
)
Logger.info(result.get())
<commit_msg>Add used storage account name in container info
When calling e.g 'container list' you get all container names
but you don't know from which storage account name was used
One would need to look at the config file to check which
storage account name was configured which could be avoided
by just adding this information to the output<commit_after>"""
usage: azure-cli container list
azure-cli container content <name>
commands:
list list available containers
content list content of given container
"""
# project
from cli_task import CliTask
from storage_account import StorageAccount
from data_collector import DataCollector
from logger import Logger
from exceptions import *
from container import Container
class ContainerTask(CliTask):
def process(self):
self.account = StorageAccount(self.account_name, self.config_file)
self.container = Container(self.account)
if self.command_args['list']:
self.__list()
elif self.command_args['content']:
self.__content()
else:
raise AzureUnknownContainerCommand(self.command_args)
def __list(self):
result = DataCollector()
result.add(
'containers:' + self.account.get_name(),
self.container.list()
)
Logger.info(result.get())
def __content(self):
result = DataCollector()
result.add(
'container_content:' + self.account.get_name(),
self.container.content(self.command_args['<name>'])
)
Logger.info(result.get())
|
ee0d5199794d0a74a9a8841108d275e5f32089ad
|
rafem/__init__.py
|
rafem/__init__.py
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
Rename package from avulsion to rafem.
|
Rename package from avulsion to rafem.
|
Python
|
mit
|
katmratliff/avulsion-bmi,mcflugen/avulsion-bmi
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
Rename package from avulsion to rafem.
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
<commit_before>"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
<commit_msg>Rename package from avulsion to rafem.<commit_after>
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
Rename package from avulsion to rafem."""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
<commit_before>"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import RiverModule
__all__ = ['BmiRiverModule', 'RiverModule']
<commit_msg>Rename package from avulsion to rafem.<commit_after>"""River Avulsion Module."""
from .riverbmi import BmiRiverModule
from .rivermodule import rivermodule
__all__ = ['BmiRiverModule', 'rivermodule']
|
af5e073f2196bd1ced3e25eec93709c6135fb145
|
frameworks/Python/API-Hour/hello/etc/hello/api_hour/gunicorn_conf.py
|
frameworks/Python/API-Hour/hello/etc/hello/api_hour/gunicorn_conf.py
|
import multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 2
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000
|
import multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 3
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000
|
Increase workers to have the same number as other Python frameworks
|
Increase workers to have the same number as other Python frameworks
|
Python
|
bsd-3-clause
|
grob/FrameworkBenchmarks,methane/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,zloster/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sgml/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,methane/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,methane/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sxend/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,testn/Framework
Benchmarks,kostya-sh/FrameworkBenchmarks,methane/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,grob/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zapov/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,valyala/FrameworkBenchmarks,actframework/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zapov/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,methane/FrameworkBenchmarks,actframework/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,valyala/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,khellang/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,nkasvosve/Fram
eworkBenchmarks,knewmanTE/FrameworkBenchmarks,herloct/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zloster/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sxend/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,joshk/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,doom369/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jamming/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jamming/FrameworkBenchmarks,methane/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,j
amming/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,testn/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,valyala/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,herloct/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sgml/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,doom369/FrameworkBenchmarks,herloct/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,methane/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,doom369/FrameworkBenchmarks,testn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jamming/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,testn/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greenlaw110/FrameworkBenchm
arks,ashawnbandy-te-tfb/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,khellang/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,testn/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,doom369/FrameworkBenchmarks,grob/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,khellang/FrameworkBenchmarks,testn/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,valyala/FrameworkBenchmarks,herloct/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jamming/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,joshk/FrameworkBenchmarks,valyala/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,denkab/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Synchro/FrameworkBenchma
rks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zapov/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,methane/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zapov/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,khellang/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Verber/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,joshk/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jamming/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,actframework/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,s
agenschneider/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,doom369/FrameworkBenchmarks,grob/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,actframework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,grob/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,testn/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sgml/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,actframework/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,grob/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sxend/FrameworkBenchmarks,fabianmurariu/FrameworkBench
marks,testn/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,testn/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,joshk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zapov/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,herloct/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,denkab/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sgml/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,grob/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,denkab/FrameworkBenchmarks,joshk/FrameworkBenchmarks,PermeAgility/Framewor
kBenchmarks,doom369/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zloster/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Verber/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,denkab/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sgml/FrameworkBenchmarks,joshk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,grob/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,testn/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Verber/FrameworkBenchmarks,khellang/FrameworkBenchmarks,herloct/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,grob/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Verber/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,denkab/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,sxend/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Rayne/Framew
orkBenchmarks,Synchro/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,denkab/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,actframework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,doom369/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,grob/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Verber/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jamming/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sgml/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zloster/FrameworkBenchmarks,testn/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,khellang/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,psfblair/Framewor
kBenchmarks,khellang/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,methane/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,grob/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zapov/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,denkab/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,denkab/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,saturday06/FrameworkB
enchmarks,denkab/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,testn/FrameworkBenchmarks,doom369/FrameworkBenchmarks,khellang/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,actframework/FrameworkBenchmarks,methane/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sxend/FrameworkBenchmarks,methane/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,valyala/FrameworkBenchmarks,khellang/FrameworkBenchmarks,denkab/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sgml/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,doom369/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zloster/FrameworkBenchmarks,denkab/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,denkab/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,actframework/FrameworkBenchmarks,herloct/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/F
rameworkBenchmarks,Rydgel/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,methane/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,methane/FrameworkBenchmarks,zloster/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,grob/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,methane/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,joshk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,herloct/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,valyala/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,markkolich/Frame
workBenchmarks,k-r-g/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,grob/FrameworkBenchmarks,zloster/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,valyala/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sxend/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,denkab/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,grob/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sxend/Framew
orkBenchmarks,zdanek/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sxend/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,khellang/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,doom369/FrameworkBenchmarks,testn/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,testn/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,herloct/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zloster/FrameworkBenchmar
ks,zapov/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,doom369/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks
|
import multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 2
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000Increase workers to have the same number as other Python frameworks
|
import multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 3
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000
|
<commit_before>import multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 2
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000<commit_msg>Increase workers to have the same number as other Python frameworks<commit_after>
|
import multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 3
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000
|
import multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 2
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000Increase workers to have the same number as other Python frameworksimport multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 3
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000
|
<commit_before>import multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 2
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000<commit_msg>Increase workers to have the same number as other Python frameworks<commit_after>import multiprocessing
import os
_is_travis = os.environ.get('TRAVIS') == 'true'
workers = multiprocessing.cpu_count() * 3
if _is_travis:
workers = 2
bind = "0.0.0.0:8008"
keepalive = 120
errorlog = '-'
pidfile = 'api_hour.pid'
pythonpath = 'hello'
backlog = 10240000
|
2494562f9f7eab0125dbee4ddadd47c636f4ff6d
|
run.py
|
run.py
|
import os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
|
import os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.config import GlobalConfig
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
config = GlobalConfig()
config['last_shutdown_time'] = int(time.time())
config.save()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
|
Write last shutdown time when we shutdown
|
Write last shutdown time when we shutdown
|
Python
|
mit
|
nikhilm/muzicast,nikhilm/muzicast
|
import os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
Write last shutdown time when we shutdown
|
import os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.config import GlobalConfig
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
config = GlobalConfig()
config['last_shutdown_time'] = int(time.time())
config.save()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
|
<commit_before>import os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
<commit_msg>Write last shutdown time when we shutdown<commit_after>
|
import os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.config import GlobalConfig
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
config = GlobalConfig()
config['last_shutdown_time'] = int(time.time())
config.save()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
|
import os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
Write last shutdown time when we shutdownimport os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.config import GlobalConfig
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
config = GlobalConfig()
config['last_shutdown_time'] = int(time.time())
config.save()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
|
<commit_before>import os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
<commit_msg>Write last shutdown time when we shutdown<commit_after>import os
import time
import sys
import signal
import subprocess
from muzicast.const import BASEDIR, WEB_PORT
from muzicast.config import GlobalConfig
from muzicast.web import app
print 'Running', os.getpid(), os.getppid()
class Runner(object):
def run(self):
self.streamer = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'streamer.py')])
self.scanner = subprocess.Popen([sys.executable, os.path.join(BASEDIR, 'collection/__init__.py')])
print 'Started streamer PID %d'%self.streamer.pid
print 'Started scanner PID %d'%self.scanner.pid
signal.signal(signal.SIGINT, self.shutdown)
signal.signal(signal.SIGTERM, self.shutdown)
app.run('0.0.0.0', WEB_PORT, debug=True, use_reloader=False)
#app.run('0.0.0.0', WEB_PORT, debug=False, use_reloader=False)
def shutdown(self, signum, frame):
self.streamer.terminate()
self.scanner.terminate()
config = GlobalConfig()
config['last_shutdown_time'] = int(time.time())
config.save()
sys.exit(0)
if __name__ == '__main__':
r = Runner()
r.run()
|
e16e2a669f883480329f41acbd0955920dfc83e2
|
Tools/send2server/s2s.py
|
Tools/send2server/s2s.py
|
# -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server)"""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
# -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server) is designed for use with a public ssh key."""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
Update description. Module needs testing.
|
Update description. Module needs testing.
|
Python
|
mit
|
datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts
|
# -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server)"""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
Update description. Module needs testing.
|
# -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server) is designed for use with a public ssh key."""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
<commit_before># -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server)"""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
<commit_msg>Update description. Module needs testing.<commit_after>
|
# -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server) is designed for use with a public ssh key."""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
# -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server)"""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
Update description. Module needs testing.# -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server) is designed for use with a public ssh key."""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
<commit_before># -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server)"""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
<commit_msg>Update description. Module needs testing.<commit_after># -*- coding: utf-8 -*-
"""
File Name:
Description: s2s sets up sending files to servers via public SSH keys.
Author: S. Hutchins
Date Created: Tue Apr 11 12:31:17 2017
Project Name: Orthologs Project
"""
#import os
#import logging as log
#import pandas as pd
#from datetime import datetime as d
##import zipfile
#import pexpect
import subprocess
#------------------------------------------------------------------------------
class S2S(object):
"""S2S (Send 2 Server) is designed for use with a public ssh key."""
def __init__(username, server_address):
address = server_address
user = username
sendto = user + "@" + address + ":"
return sendto
def scpto(self, file, destpath):
cmd = "scp " + file + " " + self.sendto + destpath
status = subprocess.call([cmd], shell=True)
if status == 0: # Command was successful.
print("%s file sent." % file)
pass # Continue
else: # Unsuccessful. Stdout will be '1'.
print("%s file not sent." % file)
|
28f59d245d4695de5dbcfc0302f34c46234fd116
|
blackgate/executor_pools.py
|
blackgate/executor_pools.py
|
# -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=1):
executor = QueueExecutor(pool_key=group_key, max_size=max_size)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
|
# -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=10, max_workers=10):
executor = QueueExecutor(pool_key=group_key, max_size=max_size, max_workers=max_workers)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
|
Set max_workers the same as max_size.
|
Set max_workers the same as max_size.
|
Python
|
mit
|
soasme/blackgate
|
# -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=1):
executor = QueueExecutor(pool_key=group_key, max_size=max_size)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
Set max_workers the same as max_size.
|
# -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=10, max_workers=10):
executor = QueueExecutor(pool_key=group_key, max_size=max_size, max_workers=max_workers)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
|
<commit_before># -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=1):
executor = QueueExecutor(pool_key=group_key, max_size=max_size)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
<commit_msg>Set max_workers the same as max_size.<commit_after>
|
# -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=10, max_workers=10):
executor = QueueExecutor(pool_key=group_key, max_size=max_size, max_workers=max_workers)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
|
# -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=1):
executor = QueueExecutor(pool_key=group_key, max_size=max_size)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
Set max_workers the same as max_size.# -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=10, max_workers=10):
executor = QueueExecutor(pool_key=group_key, max_size=max_size, max_workers=max_workers)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
|
<commit_before># -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=1):
executor = QueueExecutor(pool_key=group_key, max_size=max_size)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
<commit_msg>Set max_workers the same as max_size.<commit_after># -*- coding: utf-8 -*-
from blackgate.executor import QueueExecutor
from tornado.ioloop import IOLoop
class ExecutorPools(object):
class PoolFull(Exception):
pass
class ExecutionTimeout(Exception):
pass
class ExecutionFailure(Exception):
pass
def __init__(self):
self.pools = {}
def register_pool(self, group_key, max_size=10, max_workers=10):
executor = QueueExecutor(pool_key=group_key, max_size=max_size, max_workers=max_workers)
IOLoop.current().spawn_callback(executor.consume)
self.pools[group_key] = executor
def get_executor(self, group_key):
if group_key not in self.pools:
raise Exception("Pool not registerd")
return self.pools[group_key]
|
01e2b7aeaefa54f5a45886ee19607906f7d9064f
|
app/views/post_view.py
|
app/views/post_view.py
|
from flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
|
from flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from flask_user import login_required
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@login_required
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@login_required
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
|
Add login required to new and edit post views
|
Add login required to new and edit post views
|
Python
|
mit
|
oldani/nanodegree-blog,oldani/nanodegree-blog,oldani/nanodegree-blog
|
from flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
Add login required to new and edit post views
|
from flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from flask_user import login_required
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@login_required
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@login_required
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
|
<commit_before>from flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
<commit_msg>Add login required to new and edit post views<commit_after>
|
from flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from flask_user import login_required
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@login_required
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@login_required
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
|
from flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
Add login required to new and edit post viewsfrom flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from flask_user import login_required
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@login_required
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@login_required
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
|
<commit_before>from flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
<commit_msg>Add login required to new and edit post views<commit_after>from flask import render_template, redirect, url_for
from flask_classy import FlaskView, route
from flask_user import login_required
from ..models import PostModel
from ..forms import PostForm
class Post(FlaskView):
""" Here will handle post creations, delete and update."""
def get(self, entity_id):
post = PostModel()
post = post.get(entity_id)
return render_template("post/post.html", post=post)
@login_required
@route("/new/", methods=["GET", "POST"])
def new(self):
form = PostForm()
if form.validate_on_submit():
post = PostModel(**form.data)
post.put()
return redirect(url_for("Post:get", entity_id=post.id))
return render_template("post/post_form.html", form=form,
url="Post:new")
@login_required
@route("/edit/<entity_id>", methods=["GET", "POST"])
def edit(self, entity_id):
post = PostModel()
entity = post.get(entity_id)
form = PostForm(**entity)
if form.validate_on_submit():
post.update(entity_id, form.data)
return redirect(url_for("Post:get", entity_id=entity_id))
return render_template("post/post_form.html", form=form,
url="Post:edit", entity_id=entity_id)
|
4a7f152e5feb9393ae548f239b2cbf2d8cee3c4e
|
modules/email.py
|
modules/email.py
|
# -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(subject))
|
# -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(text_subject))
|
Fix service name when we failed to send a mail.
|
Fix service name when we failed to send a mail.
This solves issue #3.
|
Python
|
apache-2.0
|
Lex-Persona/SupExt
|
# -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(subject))
Fix service name when we failed to send a mail.
This solves issue #3.
|
# -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(text_subject))
|
<commit_before># -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(subject))
<commit_msg>Fix service name when we failed to send a mail.
This solves issue #3.<commit_after>
|
# -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(text_subject))
|
# -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(subject))
Fix service name when we failed to send a mail.
This solves issue #3.# -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(text_subject))
|
<commit_before># -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(subject))
<commit_msg>Fix service name when we failed to send a mail.
This solves issue #3.<commit_after># -*- coding: utf-8 -*-
from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time
class email:
def __init__(self, config):
self.logger = logging.getLogger('app_logger')
self.server = config['host']
self.port = config['port']
self.sender = config['sender']
self.password = config['password']
def send(self, name, ret, mailto, subject, content, message):
with open(content, 'r') as mail_config_file:
try:
body = mail_config_file.read()
except:
self.logger.error('Invalid configuration content file')
sys.exit(1)
text_content = Template(body).render(service=name, diagnostic=ret['message'])
text_subject = Template(subject).render(service=name)
try:
test_smtp = sender.Mail(self.server, port=self.port, username=self.sender, password=self.password, use_tls=True)
test_smtp.send_message(text_subject, to=mailto, fromaddr=self.sender, body=text_content)
except:
self.logger.error('Cannot send email {0}'.format(text_subject))
|
50be7fe6acf0c79af0263b4f1bd60629ecbde832
|
froide/routing.py
|
froide/routing.py
|
from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer)
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
|
from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer.as_asgi())
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
|
Fix ws consumers for channels 3.x
|
Fix ws consumers for channels 3.x
|
Python
|
mit
|
fin/froide,fin/froide,fin/froide,fin/froide
|
from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer)
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
Fix ws consumers for channels 3.x
|
from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer.as_asgi())
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
|
<commit_before>from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer)
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
<commit_msg>Fix ws consumers for channels 3.x<commit_after>
|
from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer.as_asgi())
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
|
from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer)
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
Fix ws consumers for channels 3.xfrom django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer.as_asgi())
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
|
<commit_before>from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer)
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
<commit_msg>Fix ws consumers for channels 3.x<commit_after>from django.urls import path
from channels.routing import ProtocolTypeRouter, URLRouter
from channels.auth import AuthMiddlewareStack
from channels.security.websocket import AllowedHostsOriginValidator
from froide.problem.consumers import ModerationConsumer
websocket_urls = [
path('moderation/', ModerationConsumer.as_asgi())
]
application = ProtocolTypeRouter({
'websocket': AllowedHostsOriginValidator(
AuthMiddlewareStack(
URLRouter([
path('ws/', URLRouter(websocket_urls))
])
)
),
})
|
c559c639f7c3deea4e166dd2f6fee1cb8a1297b7
|
tests/integration/test_metrics.py
|
tests/integration/test_metrics.py
|
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_gauge(self, elasticsearch_backend):
value = 42
metrics = Metrics(backend=elasticsearch_backend)
metrics.gauge('test_gauge', value)
result = elasticsearch_backend.client.search(index=elasticsearch_backend._get_index_name(), doc_type='gauge')
assert result
assert result['hits']['hits'][0]['_source']['value'] == value
assert result['hits']['hits'][0]['_source']['name'] == 'test_gauge'
def test_mongo_gauge(self, mongo_backend):
value = 42
metrics = Metrics(backend=mongo_backend)
metrics.gauge('test_gauge', value)
result = mongo_backend.collection.find_one({"metric": 'gauge'})
assert result
assert result['value'] == value
assert result['name'] == 'test_gauge'
def test_logger_gauge(self, logger_backend, logger_filename):
value = 42
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', value)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert str(value) in result
assert 'test_gauge' in result
|
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_metric(self, elasticsearch_backend):
metrics = Metrics(backend=elasticsearch_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result['_id']
def test_mongo_metric(self, mongo_backend):
metrics = Metrics(backend=mongo_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result.inserted_id
def test_logger_metric(self, logger_backend, logger_filename):
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', 42)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert 'test_gauge' in result
|
Change integration test of metrics
|
Change integration test of metrics
|
Python
|
mit
|
APSL/kaneda
|
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_gauge(self, elasticsearch_backend):
value = 42
metrics = Metrics(backend=elasticsearch_backend)
metrics.gauge('test_gauge', value)
result = elasticsearch_backend.client.search(index=elasticsearch_backend._get_index_name(), doc_type='gauge')
assert result
assert result['hits']['hits'][0]['_source']['value'] == value
assert result['hits']['hits'][0]['_source']['name'] == 'test_gauge'
def test_mongo_gauge(self, mongo_backend):
value = 42
metrics = Metrics(backend=mongo_backend)
metrics.gauge('test_gauge', value)
result = mongo_backend.collection.find_one({"metric": 'gauge'})
assert result
assert result['value'] == value
assert result['name'] == 'test_gauge'
def test_logger_gauge(self, logger_backend, logger_filename):
value = 42
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', value)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert str(value) in result
assert 'test_gauge' in result
Change integration test of metrics
|
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_metric(self, elasticsearch_backend):
metrics = Metrics(backend=elasticsearch_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result['_id']
def test_mongo_metric(self, mongo_backend):
metrics = Metrics(backend=mongo_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result.inserted_id
def test_logger_metric(self, logger_backend, logger_filename):
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', 42)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert 'test_gauge' in result
|
<commit_before>from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_gauge(self, elasticsearch_backend):
value = 42
metrics = Metrics(backend=elasticsearch_backend)
metrics.gauge('test_gauge', value)
result = elasticsearch_backend.client.search(index=elasticsearch_backend._get_index_name(), doc_type='gauge')
assert result
assert result['hits']['hits'][0]['_source']['value'] == value
assert result['hits']['hits'][0]['_source']['name'] == 'test_gauge'
def test_mongo_gauge(self, mongo_backend):
value = 42
metrics = Metrics(backend=mongo_backend)
metrics.gauge('test_gauge', value)
result = mongo_backend.collection.find_one({"metric": 'gauge'})
assert result
assert result['value'] == value
assert result['name'] == 'test_gauge'
def test_logger_gauge(self, logger_backend, logger_filename):
value = 42
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', value)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert str(value) in result
assert 'test_gauge' in result
<commit_msg>Change integration test of metrics<commit_after>
|
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_metric(self, elasticsearch_backend):
metrics = Metrics(backend=elasticsearch_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result['_id']
def test_mongo_metric(self, mongo_backend):
metrics = Metrics(backend=mongo_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result.inserted_id
def test_logger_metric(self, logger_backend, logger_filename):
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', 42)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert 'test_gauge' in result
|
from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_gauge(self, elasticsearch_backend):
value = 42
metrics = Metrics(backend=elasticsearch_backend)
metrics.gauge('test_gauge', value)
result = elasticsearch_backend.client.search(index=elasticsearch_backend._get_index_name(), doc_type='gauge')
assert result
assert result['hits']['hits'][0]['_source']['value'] == value
assert result['hits']['hits'][0]['_source']['name'] == 'test_gauge'
def test_mongo_gauge(self, mongo_backend):
value = 42
metrics = Metrics(backend=mongo_backend)
metrics.gauge('test_gauge', value)
result = mongo_backend.collection.find_one({"metric": 'gauge'})
assert result
assert result['value'] == value
assert result['name'] == 'test_gauge'
def test_logger_gauge(self, logger_backend, logger_filename):
value = 42
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', value)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert str(value) in result
assert 'test_gauge' in result
Change integration test of metricsfrom kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_metric(self, elasticsearch_backend):
metrics = Metrics(backend=elasticsearch_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result['_id']
def test_mongo_metric(self, mongo_backend):
metrics = Metrics(backend=mongo_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result.inserted_id
def test_logger_metric(self, logger_backend, logger_filename):
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', 42)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert 'test_gauge' in result
|
<commit_before>from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_gauge(self, elasticsearch_backend):
value = 42
metrics = Metrics(backend=elasticsearch_backend)
metrics.gauge('test_gauge', value)
result = elasticsearch_backend.client.search(index=elasticsearch_backend._get_index_name(), doc_type='gauge')
assert result
assert result['hits']['hits'][0]['_source']['value'] == value
assert result['hits']['hits'][0]['_source']['name'] == 'test_gauge'
def test_mongo_gauge(self, mongo_backend):
value = 42
metrics = Metrics(backend=mongo_backend)
metrics.gauge('test_gauge', value)
result = mongo_backend.collection.find_one({"metric": 'gauge'})
assert result
assert result['value'] == value
assert result['name'] == 'test_gauge'
def test_logger_gauge(self, logger_backend, logger_filename):
value = 42
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', value)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert str(value) in result
assert 'test_gauge' in result
<commit_msg>Change integration test of metrics<commit_after>from kaneda import Metrics
class TestMetrics(object):
def test_elasticsearch_metric(self, elasticsearch_backend):
metrics = Metrics(backend=elasticsearch_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result['_id']
def test_mongo_metric(self, mongo_backend):
metrics = Metrics(backend=mongo_backend)
result = metrics.gauge('test_gauge', 42)
assert result
assert result.inserted_id
def test_logger_metric(self, logger_backend, logger_filename):
metrics = Metrics(backend=logger_backend)
metrics.gauge('test_gauge', 42)
with open(logger_filename) as f:
lines = f.readlines()
assert lines
result = lines[-1].split(' - ')[2]
assert result
assert 'test_gauge' in result
|
8d4ea7037e74574d98dc376b7413e36b3fd6aa8e
|
knights/loader.py
|
knights/loader.py
|
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
|
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=full_name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
|
Use full path of template as filename when compiling
|
Use full path of template as filename when compiling
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
Use full path of template as filename when compiling
|
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=full_name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
|
<commit_before>
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
<commit_msg>Use full path of template as filename when compiling<commit_after>
|
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=full_name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
|
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
Use full path of template as filename when compiling
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=full_name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
|
<commit_before>
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
<commit_msg>Use full path of template as filename when compiling<commit_after>
import os.path
from .compiler import kompile
PATHS = []
class TemplateNotFound(Exception):
pass
def add_path(path):
path = os.path.abspath(path)
if path not in PATHS:
PATHS.append(path)
def load_template(name, paths=None, raw=False):
if paths is None:
paths = PATHS[:]
for path in paths:
full_name = os.path.abspath(os.path.join(path, name))
if not full_name.startswith(path):
continue
try:
with open(full_name, encoding='utf-8') as fin:
src = fin.read()
return kompile(src, raw=raw, filename=full_name)
except FileNotFoundError:
pass
else:
raise TemplateNotFound(name)
|
1ae2cc1c9b36c323f05c210812e383eb09bb6c7f
|
src/model/predict_rf_model.py
|
src/model/predict_rf_model.py
|
import numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
#X_test = test.ix[:,1:]
X_test = test.ix[:9,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
|
import numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
X_test = test.ix[:,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
|
Update to use all the test
|
Update to use all the test
|
Python
|
bsd-3-clause
|
parkerzf/kaggle-expedia,parkerzf/kaggle-expedia,parkerzf/kaggle-expedia
|
import numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
#X_test = test.ix[:,1:]
X_test = test.ix[:9,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
Update to use all the test
|
import numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
X_test = test.ix[:,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
|
<commit_before>import numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
#X_test = test.ix[:,1:]
X_test = test.ix[:9,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
<commit_msg>Update to use all the test<commit_after>
|
import numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
X_test = test.ix[:,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
|
import numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
#X_test = test.ix[:,1:]
X_test = test.ix[:9,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
Update to use all the testimport numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
X_test = test.ix[:,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
|
<commit_before>import numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
#X_test = test.ix[:,1:]
X_test = test.ix[:9,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
<commit_msg>Update to use all the test<commit_after>import numpy as np
import pandas as pd
import sys
import os
from sklearn.externals import joblib
from sklearn.ensemble import RandomForestClassifier
scriptpath = os.path.dirname(os.path.realpath(sys.argv[0])) + '/../'
sys.path.append(os.path.abspath(scriptpath))
import utils
parameter_str = '_'.join(['top', str(utils.k), 'cw', str(utils.click_weight), 'year', utils.train_year])
cforest = joblib.load(utils.model_path + 'rf_all_without_time_' + parameter_str +'.pkl')
test = joblib.load(utils.processed_data_path + 'test_all_' + parameter_str +'.pkl')
X_test = test.ix[:,1:]
X_test.fillna(-1, inplace=True)
print "predict RandomForest Classifier..."
probs = cforest.predict_proba(X_test)
sorted_index = np.argsort(-np.array(probs))[:,:5]
result = pd.DataFrame(columns = {'hotel_cluster'})
result['hotel_cluster'] = np.array([np.array_str(sorted_index[i])[1:-1] for i in range(sorted_index.shape[0])])
result.hotel_cluster.to_csv(utils.model_path +
'results/submission_rf_all_without_time_' + parameter_str + '.csv', header=True, index_label='id')
|
966070c1257351c0a379473aa87e7d6d27d6c0e1
|
src/__init__.py
|
src/__init__.py
|
#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk
from axline import plot_line
|
#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk,flip
from axline import plot_line
|
Add flip function to init file.
|
Add flip function to init file.
|
Python
|
bsd-2-clause
|
stober/utils
|
#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk
from axline import plot_line
Add flip function to init file.
|
#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk,flip
from axline import plot_line
|
<commit_before>#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk
from axline import plot_line
<commit_msg>Add flip function to init file.<commit_after>
|
#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk,flip
from axline import plot_line
|
#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk
from axline import plot_line
Add flip function to init file.#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk,flip
from axline import plot_line
|
<commit_before>#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk
from axline import plot_line
<commit_msg>Add flip function to init file.<commit_after>#! /usr/bin/env python
"""
Author: Jeremy M. Stober
Program: __INIT__.PY
Date: Friday, March 23 2012
Description: Common utilities.
"""
from pickle_utils import load_or_compute,loaditer
from plot_utils import dual_scatter, lvl_scatter
from utils import create_cluster_colors, create_cluster_colors_rgb, find_duplicates, find_matches, rsme,incavg,consumer, debugflag, timerflag, sp_create, sp_create_data,chunk,flip
from axline import plot_line
|
d732eb43013eedd700ebb00630a26ae97ecdd0b9
|
onetime/views.py
|
onetime/views.py
|
from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key, login_url=None):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
|
from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
|
Remove a don't-know-why-it's-still-there parameter: login_url
|
Remove a don't-know-why-it's-still-there parameter: login_url
|
Python
|
agpl-3.0
|
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,uploadcare/django-loginurl,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,fajran/django-loginurl,ISIFoundation/influenzanet-website,vanschelven/cmsplugin-journal
|
from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key, login_url=None):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
Remove a don't-know-why-it's-still-there parameter: login_url
|
from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
|
<commit_before>from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key, login_url=None):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
<commit_msg>Remove a don't-know-why-it's-still-there parameter: login_url<commit_after>
|
from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
|
from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key, login_url=None):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
Remove a don't-know-why-it's-still-there parameter: login_urlfrom datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
|
<commit_before>from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key, login_url=None):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
<commit_msg>Remove a don't-know-why-it's-still-there parameter: login_url<commit_after>from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseGone
from django.contrib import auth
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
return HttpResponse('ok', content_type='text/plain')
def login(request, key):
next = request.GET.get('next', None)
if next is None:
next = settings.LOGIN_REDIRECT_URL
user = auth.authenticate(key=key)
if user is None:
url = settings.LOGIN_URL
if next is not None:
url = '%s?next=%s' % (url, next)
return HttpResponseRedirect(url)
auth.login(request, user)
data = Key.objects.get(key=key)
data.update_usage()
if data.next is not None:
next = data.next
return HttpResponseRedirect(next)
|
7ed4c7e24a319b43810781a92f70e113fea82adc
|
syntacticframes_project/syntacticframes/templatetags/syntacticframes_extras.py
|
syntacticframes_project/syntacticframes/templatetags/syntacticframes_extras.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.%d.\2" % mtime, path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.{}.\2".format(mtime), path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
|
Use format() instead of %
|
Use format() instead of %
|
Python
|
mit
|
aymara/verbenet-editor,aymara/verbenet-editor,aymara/verbenet-editor
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.%d.\2" % mtime, path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
Use format() instead of %
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.{}.\2".format(mtime), path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.%d.\2" % mtime, path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
<commit_msg>Use format() instead of %<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.{}.\2".format(mtime), path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.%d.\2" % mtime, path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
Use format() instead of %#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.{}.\2".format(mtime), path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.%d.\2" % mtime, path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
<commit_msg>Use format() instead of %<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.utils.html import mark_safe
register = template.Library()
from django.conf import settings
import os, re
rx = re.compile(r"^(.*)\.(.*?)$")
@register.simple_tag
def version(path):
full_path = os.path.join(settings.STATIC_ROOT, path)
if not settings.DEBUG:
# Get file modification time.
os.stat_float_times(False)
mtime = os.path.getmtime(full_path) # raises OSError if file does not exist
path = rx.sub(r"\1.{}.\2".format(mtime), path)
return os.path.join(settings.STATIC_URL, path)
@register.filter
def highlight(text, word):
return mark_safe(text.replace(word, "<span class='highlight'>%s</span>" % word))
|
2654812e66ad7388a4dd7f1e53dbdf6511f74908
|
blaze/compute/pytables.py
|
blaze/compute/pytables.py
|
from __future__ import absolute_import, division, print_function
import tables as tb
from blaze.expr import *
from datashape import Record
from ..dispatch import dispatch
__all__ = ['tb']
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
|
from __future__ import absolute_import, division, print_function
import numpy as np
import tables as tb
from blaze.expr import Selection, Head, Column, ColumnWise, Projection
from blaze.expr import eval_str
from datashape import Record
from ..dispatch import dispatch
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Projection, tb.Table)
def compute_one(proj, t, **kwargs):
# only options here are
# read the whole thing in and then select
# or
# create an output array that is at most the size of the on disk table and
# fill it will the columns iteratively
# both of these options aren't ideal but pytables has no way to select
# multiple column subsets so pick the one where we can optimize for the best
# case rather than prematurely pessimizing
#
# TODO: benchmark on big tables because i'm not sure exactly what the
# implications here are for memory usage
columns = list(proj.columns)
dtype = np.dtype([(col, t.dtype[col]) for col in columns])
out = np.empty(t.shape, dtype=dtype)
for c in columns:
out[c] = t.col(c)
return out
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
@dispatch(ColumnWise, tb.Table)
def compute_one(c, t, **kwargs):
columns = c.active_columns()
uservars = dict((col, getattr(t.cols, col)) for col in columns)
expr = tb.Expr(str(c.expr), uservars=uservars)
return expr.eval()
|
Implement projection and ColumnWise for PyTables
|
Implement projection and ColumnWise for PyTables
|
Python
|
bsd-3-clause
|
mrocklin/blaze,caseyclements/blaze,alexmojaki/blaze,ContinuumIO/blaze,jdmcbr/blaze,xlhtc007/blaze,cpcloud/blaze,maxalbert/blaze,alexmojaki/blaze,jdmcbr/blaze,cowlicks/blaze,cowlicks/blaze,scls19fr/blaze,caseyclements/blaze,LiaoPan/blaze,ChinaQuants/blaze,nkhuyu/blaze,jcrist/blaze,ContinuumIO/blaze,dwillmer/blaze,cpcloud/blaze,jcrist/blaze,nkhuyu/blaze,scls19fr/blaze,dwillmer/blaze,LiaoPan/blaze,maxalbert/blaze,ChinaQuants/blaze,xlhtc007/blaze,mrocklin/blaze
|
from __future__ import absolute_import, division, print_function
import tables as tb
from blaze.expr import *
from datashape import Record
from ..dispatch import dispatch
__all__ = ['tb']
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
Implement projection and ColumnWise for PyTables
|
from __future__ import absolute_import, division, print_function
import numpy as np
import tables as tb
from blaze.expr import Selection, Head, Column, ColumnWise, Projection
from blaze.expr import eval_str
from datashape import Record
from ..dispatch import dispatch
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Projection, tb.Table)
def compute_one(proj, t, **kwargs):
# only options here are
# read the whole thing in and then select
# or
# create an output array that is at most the size of the on disk table and
# fill it will the columns iteratively
# both of these options aren't ideal but pytables has no way to select
# multiple column subsets so pick the one where we can optimize for the best
# case rather than prematurely pessimizing
#
# TODO: benchmark on big tables because i'm not sure exactly what the
# implications here are for memory usage
columns = list(proj.columns)
dtype = np.dtype([(col, t.dtype[col]) for col in columns])
out = np.empty(t.shape, dtype=dtype)
for c in columns:
out[c] = t.col(c)
return out
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
@dispatch(ColumnWise, tb.Table)
def compute_one(c, t, **kwargs):
columns = c.active_columns()
uservars = dict((col, getattr(t.cols, col)) for col in columns)
expr = tb.Expr(str(c.expr), uservars=uservars)
return expr.eval()
|
<commit_before>from __future__ import absolute_import, division, print_function
import tables as tb
from blaze.expr import *
from datashape import Record
from ..dispatch import dispatch
__all__ = ['tb']
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
<commit_msg>Implement projection and ColumnWise for PyTables<commit_after>
|
from __future__ import absolute_import, division, print_function
import numpy as np
import tables as tb
from blaze.expr import Selection, Head, Column, ColumnWise, Projection
from blaze.expr import eval_str
from datashape import Record
from ..dispatch import dispatch
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Projection, tb.Table)
def compute_one(proj, t, **kwargs):
# only options here are
# read the whole thing in and then select
# or
# create an output array that is at most the size of the on disk table and
# fill it will the columns iteratively
# both of these options aren't ideal but pytables has no way to select
# multiple column subsets so pick the one where we can optimize for the best
# case rather than prematurely pessimizing
#
# TODO: benchmark on big tables because i'm not sure exactly what the
# implications here are for memory usage
columns = list(proj.columns)
dtype = np.dtype([(col, t.dtype[col]) for col in columns])
out = np.empty(t.shape, dtype=dtype)
for c in columns:
out[c] = t.col(c)
return out
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
@dispatch(ColumnWise, tb.Table)
def compute_one(c, t, **kwargs):
columns = c.active_columns()
uservars = dict((col, getattr(t.cols, col)) for col in columns)
expr = tb.Expr(str(c.expr), uservars=uservars)
return expr.eval()
|
from __future__ import absolute_import, division, print_function
import tables as tb
from blaze.expr import *
from datashape import Record
from ..dispatch import dispatch
__all__ = ['tb']
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
Implement projection and ColumnWise for PyTablesfrom __future__ import absolute_import, division, print_function
import numpy as np
import tables as tb
from blaze.expr import Selection, Head, Column, ColumnWise, Projection
from blaze.expr import eval_str
from datashape import Record
from ..dispatch import dispatch
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Projection, tb.Table)
def compute_one(proj, t, **kwargs):
# only options here are
# read the whole thing in and then select
# or
# create an output array that is at most the size of the on disk table and
# fill it will the columns iteratively
# both of these options aren't ideal but pytables has no way to select
# multiple column subsets so pick the one where we can optimize for the best
# case rather than prematurely pessimizing
#
# TODO: benchmark on big tables because i'm not sure exactly what the
# implications here are for memory usage
columns = list(proj.columns)
dtype = np.dtype([(col, t.dtype[col]) for col in columns])
out = np.empty(t.shape, dtype=dtype)
for c in columns:
out[c] = t.col(c)
return out
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
@dispatch(ColumnWise, tb.Table)
def compute_one(c, t, **kwargs):
columns = c.active_columns()
uservars = dict((col, getattr(t.cols, col)) for col in columns)
expr = tb.Expr(str(c.expr), uservars=uservars)
return expr.eval()
|
<commit_before>from __future__ import absolute_import, division, print_function
import tables as tb
from blaze.expr import *
from datashape import Record
from ..dispatch import dispatch
__all__ = ['tb']
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
<commit_msg>Implement projection and ColumnWise for PyTables<commit_after>from __future__ import absolute_import, division, print_function
import numpy as np
import tables as tb
from blaze.expr import Selection, Head, Column, ColumnWise, Projection
from blaze.expr import eval_str
from datashape import Record
from ..dispatch import dispatch
@dispatch(tb.Table)
def discover(t):
return t.shape[0] * Record([[col, t.coltypes[col]] for col in t.colnames])
@dispatch(Selection, tb.Table)
def compute_one(sel, t, **kwargs):
s = eval_str(sel.predicate.expr)
return t.read_where(s)
@dispatch(Projection, tb.Table)
def compute_one(proj, t, **kwargs):
# only options here are
# read the whole thing in and then select
# or
# create an output array that is at most the size of the on disk table and
# fill it will the columns iteratively
# both of these options aren't ideal but pytables has no way to select
# multiple column subsets so pick the one where we can optimize for the best
# case rather than prematurely pessimizing
#
# TODO: benchmark on big tables because i'm not sure exactly what the
# implications here are for memory usage
columns = list(proj.columns)
dtype = np.dtype([(col, t.dtype[col]) for col in columns])
out = np.empty(t.shape, dtype=dtype)
for c in columns:
out[c] = t.col(c)
return out
@dispatch(Head, tb.Table)
def compute_one(h, t, **kwargs):
return t[:h.n]
@dispatch(Column, tb.Table)
def compute_one(c, t, **kwargs):
return t.col(c.column)
@dispatch(ColumnWise, tb.Table)
def compute_one(c, t, **kwargs):
columns = c.active_columns()
uservars = dict((col, getattr(t.cols, col)) for col in columns)
expr = tb.Expr(str(c.expr), uservars=uservars)
return expr.eval()
|
ff138858fdc76527ee9f05f7573ddb00cd7eed21
|
conftest.py
|
conftest.py
|
import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
|
import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.to_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.to_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_pyflux_dataset",
"tslearn.utils.from_cesium_dataset",
"tslearn.utils.to_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
|
Add all versions of conversion doctests
|
Add all versions of conversion doctests
|
Python
|
bsd-2-clause
|
rtavenar/tslearn
|
import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
Add all versions of conversion doctests
|
import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.to_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.to_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_pyflux_dataset",
"tslearn.utils.from_cesium_dataset",
"tslearn.utils.to_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
|
<commit_before>import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
<commit_msg>Add all versions of conversion doctests<commit_after>
|
import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.to_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.to_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_pyflux_dataset",
"tslearn.utils.from_cesium_dataset",
"tslearn.utils.to_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
|
import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
Add all versions of conversion doctestsimport pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.to_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.to_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_pyflux_dataset",
"tslearn.utils.from_cesium_dataset",
"tslearn.utils.to_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
|
<commit_before>import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
<commit_msg>Add all versions of conversion doctests<commit_after>import pytest
def pytest_collection_modifyitems(config, items):
try:
import pandas
except ImportError:
pandas = None
try:
import cesium
except ImportError:
cesium = None
if pandas is None:
skip_marker = pytest.mark.skip(reason="pandas not installed!")
for item in items:
if item.name in [
"tslearn.utils.from_tsfresh_dataset",
"tslearn.utils.to_tsfresh_dataset",
"tslearn.utils.from_sktime_dataset",
"tslearn.utils.to_sktime_dataset",
"tslearn.utils.from_pyflux_dataset",
"tslearn.utils.to_pyflux_dataset",
"tslearn.utils.from_cesium_dataset",
"tslearn.utils.to_cesium_dataset",
]:
item.add_marker(skip_marker)
elif cesium is None:
skip_marker = pytest.mark.skip(reason="cesium not installed!")
for item in items:
if item.name in [
"tslearn.utils.to_cesium_dataset",
"tslearn.utils.from_cesium_dataset",
]:
item.add_marker(skip_marker)
|
c803473c4bc552b9d82a4bbb0948e071a36821fd
|
web_scraper/core/html_fetchers.py
|
web_scraper/core/html_fetchers.py
|
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='python_requests.cli-ws'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
|
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='cli-ws/1.0'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
|
Change default user_agent to match mozilla standard
|
Change default user_agent to match mozilla standard
|
Python
|
mit
|
Samuel-L/cli-ws,Samuel-L/cli-ws
|
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='python_requests.cli-ws'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
Change default user_agent to match mozilla standard
|
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='cli-ws/1.0'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
|
<commit_before>import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='python_requests.cli-ws'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
<commit_msg>Change default user_agent to match mozilla standard<commit_after>
|
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='cli-ws/1.0'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
|
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='python_requests.cli-ws'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
Change default user_agent to match mozilla standardimport os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='cli-ws/1.0'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
|
<commit_before>import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='python_requests.cli-ws'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
<commit_msg>Change default user_agent to match mozilla standard<commit_after>import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import requests
def fetch_html_document(url, user_agent='cli-ws/1.0'):
"""Request html document from url
Positional Arguments:
url (str): a web address (http://example.com/)
Keyword Arguments:
user_agent (str): the user agent that will be sent with the
request (default: python_requests.cli-ws)
Return:
tuple: the status code of the response and the html document
"""
response = requests.get(url, headers={'User-Agent': user_agent})
return response.status_code, response.text
|
3e60cab845f77f8cc47c64c75b53b79441a90983
|
sources/source.py
|
sources/source.py
|
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 10):
data.extend(self._download_data(symbols[i:i+10], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
|
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 20):
data.extend(self._download_data(symbols[i:i+20], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
|
Increase batch size to 20
|
Increase batch size to 20
|
Python
|
mit
|
ErikGartner/stock-collector
|
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 10):
data.extend(self._download_data(symbols[i:i+10], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
Increase batch size to 20
|
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 20):
data.extend(self._download_data(symbols[i:i+20], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
|
<commit_before>
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 10):
data.extend(self._download_data(symbols[i:i+10], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
<commit_msg>Increase batch size to 20<commit_after>
|
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 20):
data.extend(self._download_data(symbols[i:i+20], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
|
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 10):
data.extend(self._download_data(symbols[i:i+10], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
Increase batch size to 20
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 20):
data.extend(self._download_data(symbols[i:i+20], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
|
<commit_before>
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 10):
data.extend(self._download_data(symbols[i:i+10], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
<commit_msg>Increase batch size to 20<commit_after>
class Source:
def __init__(self, name, mdb_collection):
self.name = name
self.mdb_collection = mdb_collection
def download_data(self, symbols, params=None):
print('%s - downloading %s' % (self.name, symbols))
data = []
for i in range(0, len(symbols), 20):
data.extend(self._download_data(symbols[i:i+20], params))
self.mdb_collection.insert_many(data)
print('%s - done!' % self.name)
def _download_data(self, symbols, params):
pass
|
ccd660c5deba37c0c324e64666eb6421696b3144
|
puffin/gui/form.py
|
puffin/gui/form.py
|
from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if server_name != "localhost" and self.domain.data.endswith(server_name):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
|
from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
|
Allow changing to own domain name
|
Allow changing to own domain name
|
Python
|
agpl-3.0
|
loomchild/jenca-puffin,puffinrocks/puffin,loomchild/puffin,loomchild/puffin,loomchild/puffin,loomchild/puffin,puffinrocks/puffin,loomchild/jenca-puffin,loomchild/puffin
|
from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if server_name != "localhost" and self.domain.data.endswith(server_name):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
Allow changing to own domain name
|
from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
|
<commit_before>from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if server_name != "localhost" and self.domain.data.endswith(server_name):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
<commit_msg>Allow changing to own domain name<commit_after>
|
from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
|
from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if server_name != "localhost" and self.domain.data.endswith(server_name):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
Allow changing to own domain namefrom flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
|
<commit_before>from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if server_name != "localhost" and self.domain.data.endswith(server_name):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
<commit_msg>Allow changing to own domain name<commit_after>from flask_wtf import Form
from wtforms import StringField, IntegerField, PasswordField, SubmitField, SelectField
from wtforms.validators import Required, Length, Regexp
from ..core.db import db
from ..core.security import User
from .. import app
class ApplicationForm(Form):
start = SubmitField('Start')
stop = SubmitField('Stop')
class ApplicationSettingsForm(Form):
domain = StringField('Domain', description="If you change it then make sure you also configure it with your DNS provider")
submit = SubmitField('Update')
def validate(self):
rv = Form.validate(self)
if not rv:
return False
if self.domain.data:
server_name = app.config["SERVER_NAME_FULL"]
if (server_name != "localhost"
and not self.domain.data.endswith(current_user.login + "." + server_name)
and self.domain.data.endswith(server_name)):
self.domain.errors.append('Invalid domain, cannot end with ' + server_name)
return False
return True
class ProfileForm(Form):
login = StringField('Login')
email = StringField('Email')
name = StringField('Name', validators=[Required(), Length(1, 64),
Regexp(r'^[A-Za-z0-9_\- ]+$', 0, 'Name must have only letters, numbers, spaces, dots, dashes or underscores')])
submit = SubmitField('Update')
|
403f23ae486c14066e0a93c7deca91c5fbc15b87
|
plugins/brian.py
|
plugins/brian.py
|
"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
|
"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
attribution = [
"salad master",
"esquire",
"the one and only",
"startup enthusiast",
"boba king",
"not-dictator",
"normal citizen",
"ping-pong expert"
]
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
def generate_phrase(phrases, cache, max_length=40):
seed_phrase = []
while len(seed_phrase) < 2:
seed_phrase = random.choice(phrases).split()
w1, = seed_phrase[:1]
chosen = [w1]
while w1 in cache and len(chosen)<max_length:
w1 = random.choice(cache[w1])
chosen.append(w1)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~ Brian Chu, {}'.format(generate_phrase(phrases, cache),
random.choice(attribution))
|
Use bigrams in Markov chain generator
|
Use bigrams in Markov chain generator
|
Python
|
mit
|
kvchen/keffbot,kvchen/keffbot-py
|
"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
Use bigrams in Markov chain generator
|
"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
attribution = [
"salad master",
"esquire",
"the one and only",
"startup enthusiast",
"boba king",
"not-dictator",
"normal citizen",
"ping-pong expert"
]
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
def generate_phrase(phrases, cache, max_length=40):
seed_phrase = []
while len(seed_phrase) < 2:
seed_phrase = random.choice(phrases).split()
w1, = seed_phrase[:1]
chosen = [w1]
while w1 in cache and len(chosen)<max_length:
w1 = random.choice(cache[w1])
chosen.append(w1)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~ Brian Chu, {}'.format(generate_phrase(phrases, cache),
random.choice(attribution))
|
<commit_before>"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
<commit_msg>Use bigrams in Markov chain generator<commit_after>
|
"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
attribution = [
"salad master",
"esquire",
"the one and only",
"startup enthusiast",
"boba king",
"not-dictator",
"normal citizen",
"ping-pong expert"
]
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
def generate_phrase(phrases, cache, max_length=40):
seed_phrase = []
while len(seed_phrase) < 2:
seed_phrase = random.choice(phrases).split()
w1, = seed_phrase[:1]
chosen = [w1]
while w1 in cache and len(chosen)<max_length:
w1 = random.choice(cache[w1])
chosen.append(w1)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~ Brian Chu, {}'.format(generate_phrase(phrases, cache),
random.choice(attribution))
|
"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
Use bigrams in Markov chain generator"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
attribution = [
"salad master",
"esquire",
"the one and only",
"startup enthusiast",
"boba king",
"not-dictator",
"normal citizen",
"ping-pong expert"
]
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
def generate_phrase(phrases, cache, max_length=40):
seed_phrase = []
while len(seed_phrase) < 2:
seed_phrase = random.choice(phrases).split()
w1, = seed_phrase[:1]
chosen = [w1]
while w1 in cache and len(chosen)<max_length:
w1 = random.choice(cache[w1])
chosen.append(w1)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~ Brian Chu, {}'.format(generate_phrase(phrases, cache),
random.choice(attribution))
|
<commit_before>"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~brian'.format(generate_phrase(phrases, cache))
<commit_msg>Use bigrams in Markov chain generator<commit_after>"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
attribution = [
"salad master",
"esquire",
"the one and only",
"startup enthusiast",
"boba king",
"not-dictator",
"normal citizen",
"ping-pong expert"
]
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
def generate_phrase(phrases, cache, max_length=40):
seed_phrase = []
while len(seed_phrase) < 2:
seed_phrase = random.choice(phrases).split()
w1, = seed_phrase[:1]
chosen = [w1]
while w1 in cache and len(chosen)<max_length:
w1 = random.choice(cache[w1])
chosen.append(w1)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {} ~ Brian Chu, {}'.format(generate_phrase(phrases, cache),
random.choice(attribution))
|
b995eac557d239d33c6b1cfe274dc495a0b1f1e9
|
py2app/__init__.py
|
py2app/__init__.py
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
Set py2app.__version__ using pkg_resources, that ensures that the version stays in sync with the value in setup.py
|
Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py
|
Python
|
mit
|
hsoft/pluginbuilder
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
<commit_before>"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
<commit_msg>Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py<commit_after>
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
<commit_before>"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
<commit_msg>Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py<commit_after>"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
791954f7a877aab75d615b4b00e5b40a849671f4
|
sheldon/bot.py
|
sheldon/bot.py
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.config import *
from sheldon.adapter import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
|
Move imports in alphabet order
|
Move imports in alphabet order
|
Python
|
mit
|
lises/sheldon
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.config import *
from sheldon.adapter import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
Move imports in alphabet order
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
|
<commit_before># -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.config import *
from sheldon.adapter import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
<commit_msg>Move imports in alphabet order<commit_after>
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.config import *
from sheldon.adapter import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
Move imports in alphabet order# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
|
<commit_before># -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.config import *
from sheldon.adapter import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
<commit_msg>Move imports in alphabet order<commit_after># -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
|
28dcc0ae5b960006a6cc4f1131cb51562192b969
|
pyquil/__init__.py
|
pyquil/__init__.py
|
__version__ = "2.0.0b7"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
__version__ = "2.0.0b8.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
Bump pyquil version to v2.0.0b8.dev0
|
Bump pyquil version to v2.0.0b8.dev0
|
Python
|
apache-2.0
|
rigetticomputing/pyquil
|
__version__ = "2.0.0b7"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
Bump pyquil version to v2.0.0b8.dev0
|
__version__ = "2.0.0b8.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
<commit_before>__version__ = "2.0.0b7"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
<commit_msg>Bump pyquil version to v2.0.0b8.dev0<commit_after>
|
__version__ = "2.0.0b8.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
__version__ = "2.0.0b7"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
Bump pyquil version to v2.0.0b8.dev0__version__ = "2.0.0b8.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
<commit_before>__version__ = "2.0.0b7"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
<commit_msg>Bump pyquil version to v2.0.0b8.dev0<commit_after>__version__ = "2.0.0b8.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
b3c01b61fd510aacd13d89c6ca1097746dfd99d5
|
pytips/__init__.py
|
pytips/__init__.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
import os
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
# Flask-Heroku is looking at an env var that I don't have, so overwrite
# it with one that I found by dumping os.environ in a log statement.
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'HEROKU_POSTGRESQL_CRIMSON_URL',
app.config['SQLALCHEMY_DATABASE_URI'])
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
|
Undo my 'work-around' for flask-heroku.
|
Undo my 'work-around' for flask-heroku.
|
Python
|
isc
|
gthank/pytips,gthank/pytips,gthank/pytips,gthank/pytips
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
import os
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
# Flask-Heroku is looking at an env var that I don't have, so overwrite
# it with one that I found by dumping os.environ in a log statement.
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'HEROKU_POSTGRESQL_CRIMSON_URL',
app.config['SQLALCHEMY_DATABASE_URI'])
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
Undo my 'work-around' for flask-heroku.
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
import os
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
# Flask-Heroku is looking at an env var that I don't have, so overwrite
# it with one that I found by dumping os.environ in a log statement.
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'HEROKU_POSTGRESQL_CRIMSON_URL',
app.config['SQLALCHEMY_DATABASE_URI'])
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
<commit_msg>Undo my 'work-around' for flask-heroku.<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
import os
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
# Flask-Heroku is looking at an env var that I don't have, so overwrite
# it with one that I found by dumping os.environ in a log statement.
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'HEROKU_POSTGRESQL_CRIMSON_URL',
app.config['SQLALCHEMY_DATABASE_URI'])
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
Undo my 'work-around' for flask-heroku.#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
import os
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
# Flask-Heroku is looking at an env var that I don't have, so overwrite
# it with one that I found by dumping os.environ in a log statement.
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
'HEROKU_POSTGRESQL_CRIMSON_URL',
app.config['SQLALCHEMY_DATABASE_URI'])
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
<commit_msg>Undo my 'work-around' for flask-heroku.<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Primary setup for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
from flask import Flask
from flask_heroku import Heroku
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('pytips.default_settings')
heroku = Heroku(app)
db = SQLAlchemy(app)
# I'm about to import a module that I won't use explicitly; when it loads, the
# model definitions created, so you *must* leave the import in place. Also, it
# relies on `db` being already configured, so don't import it before everything
# is all set up.
from pytips import models
# I'm about to import a module that I won't use explicitly; when it loads, the
# routes for the app will be defined, so you *must* leave the import in place.
# Also, it relies on `app` being already configured, so don't import it before
# everything is all set up.
from pytips import views
if __name__ == '__main__':
app.run()
|
faf42acd1f6e0bb5f049f54d17601bd65dd73017
|
bslint/bslint.py
|
bslint/bslint.py
|
"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.0"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
|
"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.1"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
|
Increment version number for release
|
Increment version number for release
|
Python
|
bsd-3-clause
|
sky-uk/bslint
|
"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.0"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
Increment version number for release
|
"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.1"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
|
<commit_before>"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.0"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
<commit_msg>Increment version number for release<commit_after>
|
"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.1"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
|
"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.0"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
Increment version number for release"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.1"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
|
<commit_before>"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.0"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
<commit_msg>Increment version number for release<commit_after>"""bslint.bslint: provides entry point main()."""
import sys
import os
from bslint.interface_handler import InterfaceHandler as InterfaceHandler
__version__ = "0.6.1"
def main():
try:
interface_handler = InterfaceHandler()
interface_handler.main()
except KeyboardInterrupt:
pass
def runner(to_lex=None):
sys.argv = [sys.argv[0]]
if to_lex is not None:
sys.argv.append(os.path.abspath(to_lex))
else:
sys.argv.append(os.getcwd())
interface_handler = InterfaceHandler()
interface_handler.main()
return interface_handler
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.