commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
887adb2d388a3cffbc30ec0243a6b1d3797bfeb7
|
dashboard/src/repositories.py
|
dashboard/src/repositories.py
|
"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-gemini-server",
"f8a-server-backbone"
]
|
"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-analytics-auth",
"fabric8-gemini-server",
"f8a-server-backbone"
]
|
Add new repo into repolist
|
Add new repo into repolist
|
Python
|
apache-2.0
|
tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common
|
"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-gemini-server",
"f8a-server-backbone"
]
Add new repo into repolist
|
"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-analytics-auth",
"fabric8-gemini-server",
"f8a-server-backbone"
]
|
<commit_before>"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-gemini-server",
"f8a-server-backbone"
]
<commit_msg>Add new repo into repolist<commit_after>
|
"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-analytics-auth",
"fabric8-gemini-server",
"f8a-server-backbone"
]
|
"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-gemini-server",
"f8a-server-backbone"
]
Add new repo into repolist"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-analytics-auth",
"fabric8-gemini-server",
"f8a-server-backbone"
]
|
<commit_before>"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-gemini-server",
"f8a-server-backbone"
]
<commit_msg>Add new repo into repolist<commit_after>"""List of repositories to check."""
repositories = [
"fabric8-analytics-common",
"fabric8-analytics-server",
"fabric8-analytics-worker",
"fabric8-analytics-jobs",
"fabric8-analytics-tagger",
"fabric8-analytics-stack-analysis",
"fabric8-analytics-license-analysis",
"fabric8-analytics-data-model",
"fabric8-analytics-recommender",
"fabric8-analytics-api-gateway",
"fabric8-analytics-nvd-toolkit",
"fabric8-analytics-auth",
"fabric8-gemini-server",
"f8a-server-backbone"
]
|
ecce15f103b51ece25f33490af5adaa666017a86
|
booksite/urls.py
|
booksite/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-book/(?P<tale_id>[0-9]+)$', views.create_book, name='create_book'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-tale/(?P<tale_id>[0-9]+)$', views.create_tale, name='create_tale'),
]
|
Rename links from *book* to *tale*
|
Rename links from *book* to *tale*
|
Python
|
apache-2.0
|
mark-graciov/bookit,mark-graciov/bookit
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-book/(?P<tale_id>[0-9]+)$', views.create_book, name='create_book'),
]
Rename links from *book* to *tale*
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-tale/(?P<tale_id>[0-9]+)$', views.create_tale, name='create_tale'),
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-book/(?P<tale_id>[0-9]+)$', views.create_book, name='create_book'),
]
<commit_msg>Rename links from *book* to *tale*<commit_after>
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-tale/(?P<tale_id>[0-9]+)$', views.create_tale, name='create_tale'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-book/(?P<tale_id>[0-9]+)$', views.create_book, name='create_book'),
]
Rename links from *book* to *tale*from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-tale/(?P<tale_id>[0-9]+)$', views.create_tale, name='create_tale'),
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-book/(?P<tale_id>[0-9]+)$', views.create_book, name='create_book'),
]
<commit_msg>Rename links from *book* to *tale*<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.tale_list, name='tale_list'),
url(r'^create-tale/(?P<tale_id>[0-9]+)$', views.create_tale, name='create_tale'),
]
|
200523d20333c17117539552ac9fb51c9f677543
|
irrigator_pro/home/views.py
|
irrigator_pro/home/views.py
|
#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Now available as a web application!',
}
return self.render_to_response(context)
|
#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Coming soon as a web application!',
}
return self.render_to_response(context)
|
Change subtext on main page
|
Change subtext on main page
|
Python
|
mit
|
warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro
|
#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Now available as a web application!',
}
return self.render_to_response(context)
Change subtext on main page
|
#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Coming soon as a web application!',
}
return self.render_to_response(context)
|
<commit_before>#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Now available as a web application!',
}
return self.render_to_response(context)
<commit_msg>Change subtext on main page<commit_after>
|
#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Coming soon as a web application!',
}
return self.render_to_response(context)
|
#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Now available as a web application!',
}
return self.render_to_response(context)
Change subtext on main page#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Coming soon as a web application!',
}
return self.render_to_response(context)
|
<commit_before>#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Now available as a web application!',
}
return self.render_to_response(context)
<commit_msg>Change subtext on main page<commit_after>#! /usr/bin/env python2.7
from django.views.generic import TemplateView
class HomeView(TemplateView):
template_name = 'home.html'
def get(self, request, *args, **kwargs):
context = {
'some_dynamic_value': 'Coming soon as a web application!',
}
return self.render_to_response(context)
|
acf77c9550620dd239a10c1895b1bee1e421c5df
|
project_fish/whats_fresh/tests/test_vendor_product_join_model.py
|
project_fish/whats_fresh/tests/test_vendor_product_join_model.py
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProducts')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProducts._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProduct')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProduct._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
Change tested model name to VendorProduct
|
Change tested model name to VendorProduct
|
Python
|
apache-2.0
|
osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProducts')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProducts._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
Change tested model name to VendorProduct
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProduct')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProduct._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
<commit_before>from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProducts')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProducts._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
<commit_msg>Change tested model name to VendorProduct<commit_after>
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProduct')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProduct._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProducts')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProducts._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
Change tested model name to VendorProductfrom django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProduct')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProduct._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
<commit_before>from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProducts')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProducts._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
<commit_msg>Change tested model name to VendorProduct<commit_after>from django.test import TestCase
from django.conf import settings
from phonenumber_field.modelfields import PhoneNumberField
from whats_fresh.models import *
from django.contrib.gis.db import models
class VendorProductJoinTestCase(TestCase):
def setUp(self):
self.expected_fields = {
'vendor_id': models.ForeignKey,
'product_id': models.ForeignKey,
'preparation_id': models.ForeignKey,
'vendor_price': models.TextField,
'available': models.NullBooleanField,
'id': models.AutoField
}
def test_fields_exist(self):
model = models.get_model('whats_fresh', 'VendorProduct')
for field, field_type in self.expected_fields.items():
self.assertEqual(
field_type, type(model._meta.get_field_by_name(field)[0]))
def test_no_additional_fields(self):
fields = VendorProduct._meta.get_all_field_names()
self.assertTrue(sorted(fields) == sorted(self.expected_fields.keys()))
|
11ed05498634f6171a695bbc7456dfb5fcf0e55c
|
rbm2m/config.py
|
rbm2m/config.py
|
# -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
|
# -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2M_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
|
Fix password env variable name
|
Fix password env variable name
|
Python
|
apache-2.0
|
notapresent/rbm2m,notapresent/rbm2m
|
# -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
Fix password env variable name
|
# -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2M_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
|
<commit_before># -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
<commit_msg>Fix password env variable name<commit_after>
|
# -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2M_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
|
# -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
Fix password env variable name# -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2M_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
|
<commit_before># -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
<commit_msg>Fix password env variable name<commit_after># -*- coding: utf-8 -*-
import os
class Config(object):
DEBUG = False
TESTING = False
REDIS_URI = 'redis://@localhost:6379/0'
BASIC_AUTH_USERNAME = os.environ['RBM2M_LOGIN']
BASIC_AUTH_PASSWORD = os.environ['RBM2M_PASSWORD']
class ProductionConfig(Config):
# TODO: ?charset=utf8
SQLALCHEMY_DATABASE_URI = os.environ.get('RBM2M_DATABASE_URI')
REDIS_URI = os.environ.get('RBM2M_REDIS_URI')
class DevelopmentConfig(Config):
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@127.0.0.1/rbm2m'
SQLALCHEMY_ECHO = False
DEBUG = True
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'mysql://rbm2m:rbm2m@localhost/rbm2m_test'
|
f2e4e7114c61550e5ff6cd690c7a60d71de74ad4
|
apps/urls.py
|
apps/urls.py
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from cronos.announcements.feeds import AnnouncementsFeed
feeds = {
'announcements': AnnouncementsFeed,
}
handler500 = 'cronos.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'cronos.accounts.views.index'),
(r'^about/', 'cronos.accounts.views.about'),
(r'^announcements/', 'cronos.announcements.views.announcements'),
(r'^dionysos/', 'cronos.dionysos.views.dionysos'),
(r'^eclass/', 'cronos.eclass.views.eclass'),
(r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'cronos.library.views.library'),
(r'^login/', 'cronos.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'cronos.accounts.views.accounts_settings'),
(r'^refrigerators/', 'cronos.refrigerators.views.refrigerators'),
(r'^teachers/', 'cronos.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
Disable feeds/announcements temporarily, it should be back alive now
|
Disable feeds/announcements temporarily, it should be back alive now
|
Python
|
agpl-3.0
|
LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from cronos.announcements.feeds import AnnouncementsFeed
feeds = {
'announcements': AnnouncementsFeed,
}
handler500 = 'cronos.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'cronos.accounts.views.index'),
(r'^about/', 'cronos.accounts.views.about'),
(r'^announcements/', 'cronos.announcements.views.announcements'),
(r'^dionysos/', 'cronos.dionysos.views.dionysos'),
(r'^eclass/', 'cronos.eclass.views.eclass'),
(r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'cronos.library.views.library'),
(r'^login/', 'cronos.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'cronos.accounts.views.accounts_settings'),
(r'^refrigerators/', 'cronos.refrigerators.views.refrigerators'),
(r'^teachers/', 'cronos.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
Disable feeds/announcements temporarily, it should be back alive now
|
# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
<commit_before># -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from cronos.announcements.feeds import AnnouncementsFeed
feeds = {
'announcements': AnnouncementsFeed,
}
handler500 = 'cronos.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'cronos.accounts.views.index'),
(r'^about/', 'cronos.accounts.views.about'),
(r'^announcements/', 'cronos.announcements.views.announcements'),
(r'^dionysos/', 'cronos.dionysos.views.dionysos'),
(r'^eclass/', 'cronos.eclass.views.eclass'),
(r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'cronos.library.views.library'),
(r'^login/', 'cronos.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'cronos.accounts.views.accounts_settings'),
(r'^refrigerators/', 'cronos.refrigerators.views.refrigerators'),
(r'^teachers/', 'cronos.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Disable feeds/announcements temporarily, it should be back alive now<commit_after>
|
# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from cronos.announcements.feeds import AnnouncementsFeed
feeds = {
'announcements': AnnouncementsFeed,
}
handler500 = 'cronos.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'cronos.accounts.views.index'),
(r'^about/', 'cronos.accounts.views.about'),
(r'^announcements/', 'cronos.announcements.views.announcements'),
(r'^dionysos/', 'cronos.dionysos.views.dionysos'),
(r'^eclass/', 'cronos.eclass.views.eclass'),
(r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'cronos.library.views.library'),
(r'^login/', 'cronos.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'cronos.accounts.views.accounts_settings'),
(r'^refrigerators/', 'cronos.refrigerators.views.refrigerators'),
(r'^teachers/', 'cronos.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
Disable feeds/announcements temporarily, it should be back alive now# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
<commit_before># -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from cronos.announcements.feeds import AnnouncementsFeed
feeds = {
'announcements': AnnouncementsFeed,
}
handler500 = 'cronos.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'cronos.accounts.views.index'),
(r'^about/', 'cronos.accounts.views.about'),
(r'^announcements/', 'cronos.announcements.views.announcements'),
(r'^dionysos/', 'cronos.dionysos.views.dionysos'),
(r'^eclass/', 'cronos.eclass.views.eclass'),
(r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'cronos.library.views.library'),
(r'^login/', 'cronos.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'cronos.accounts.views.accounts_settings'),
(r'^refrigerators/', 'cronos.refrigerators.views.refrigerators'),
(r'^teachers/', 'cronos.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Disable feeds/announcements temporarily, it should be back alive now<commit_after># -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
a1200d38f4ba1b3f2d4570d9fd4d56c1e006eb83
|
tests/mpd/protocol/test_connection.py
|
tests/mpd/protocol/test_connection.py
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
Fix typo in mock usage
|
tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.
|
Python
|
apache-2.0
|
jcass77/mopidy,tkem/mopidy,ali/mopidy,mokieyue/mopidy,quartz55/mopidy,hkariti/mopidy,pacificIT/mopidy,quartz55/mopidy,dbrgn/mopidy,dbrgn/mopidy,mokieyue/mopidy,jmarsik/mopidy,pacificIT/mopidy,jodal/mopidy,mopidy/mopidy,bacontext/mopidy,dbrgn/mopidy,diandiankan/mopidy,vrs01/mopidy,bacontext/mopidy,jcass77/mopidy,bencevans/mopidy,mopidy/mopidy,vrs01/mopidy,hkariti/mopidy,jmarsik/mopidy,ali/mopidy,SuperStarPL/mopidy,ali/mopidy,kingosticks/mopidy,pacificIT/mopidy,pacificIT/mopidy,diandiankan/mopidy,dbrgn/mopidy,tkem/mopidy,hkariti/mopidy,mopidy/mopidy,tkem/mopidy,kingosticks/mopidy,vrs01/mopidy,quartz55/mopidy,SuperStarPL/mopidy,jmarsik/mopidy,adamcik/mopidy,jodal/mopidy,ali/mopidy,jcass77/mopidy,bacontext/mopidy,kingosticks/mopidy,mokieyue/mopidy,vrs01/mopidy,swak/mopidy,adamcik/mopidy,swak/mopidy,ZenithDK/mopidy,bencevans/mopidy,adamcik/mopidy,mokieyue/mopidy,jmarsik/mopidy,hkariti/mopidy,swak/mopidy,swak/mopidy,bencevans/mopidy,ZenithDK/mopidy,bacontext/mopidy,quartz55/mopidy,SuperStarPL/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,bencevans/mopidy,jodal/mopidy,SuperStarPL/mopidy,diandiankan/mopidy,tkem/mopidy,diandiankan/mopidy
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
<commit_before>from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
<commit_msg>tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.<commit_after>
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
<commit_before>from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
<commit_msg>tests: Fix typo in mock usage
The error was made evident by a newer mock version that no longer
swallowed the wrong assert as regular use of a spec-less mock.<commit_after>from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assert_called_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
|
c97dc5e977e57df26962a1f3e7bf0dc4b3440508
|
kaleo/views.py
|
kaleo/views.py
|
from django import http
from django.utils import simplejson as json
from django.views.decorators.http import require_http_methods
from account.models import EmailAddress
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@require_http_methods(["POST"])
def invite(request):
if not request.user.is_authenticated():
data = {"status": "ERROR", "message": "not authenticated"}
return http.HttpResponseBadRequest(json.dumps(data), content_type="application/json")
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
|
import json
from django import http
from django.views.decorators.http import require_POST
from account.models import EmailAddress
from django.contrib.auth.decorators import login_required
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@login_required
@require_POST
def invite(request):
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
|
Update view to use different imports/decorators
|
Update view to use different imports/decorators
1. We can generally expect the json module to be
available now, so no need to use what ships with
django
2. require_POST is just simpler and more direct
3. Using login_required is clearer as well instead
of using custom logic.
|
Python
|
bsd-3-clause
|
rizumu/pinax-invitations,pinax/pinax-invitations,ntucker/kaleo,jacobwegner/pinax-invitations,eldarion/kaleo,abramia/kaleo,JPWKU/kaleo
|
from django import http
from django.utils import simplejson as json
from django.views.decorators.http import require_http_methods
from account.models import EmailAddress
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@require_http_methods(["POST"])
def invite(request):
if not request.user.is_authenticated():
data = {"status": "ERROR", "message": "not authenticated"}
return http.HttpResponseBadRequest(json.dumps(data), content_type="application/json")
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
Update view to use different imports/decorators
1. We can generally expect the json module to be
available now, so no need to use what ships with
django
2. require_POST is just simpler and more direct
3. Using login_required is clearer as well instead
of using custom logic.
|
import json
from django import http
from django.views.decorators.http import require_POST
from account.models import EmailAddress
from django.contrib.auth.decorators import login_required
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@login_required
@require_POST
def invite(request):
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
|
<commit_before>from django import http
from django.utils import simplejson as json
from django.views.decorators.http import require_http_methods
from account.models import EmailAddress
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@require_http_methods(["POST"])
def invite(request):
if not request.user.is_authenticated():
data = {"status": "ERROR", "message": "not authenticated"}
return http.HttpResponseBadRequest(json.dumps(data), content_type="application/json")
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
<commit_msg>Update view to use different imports/decorators
1. We can generally expect the json module to be
available now, so no need to use what ships with
django
2. require_POST is just simpler and more direct
3. Using login_required is clearer as well instead
of using custom logic.<commit_after>
|
import json
from django import http
from django.views.decorators.http import require_POST
from account.models import EmailAddress
from django.contrib.auth.decorators import login_required
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@login_required
@require_POST
def invite(request):
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
|
from django import http
from django.utils import simplejson as json
from django.views.decorators.http import require_http_methods
from account.models import EmailAddress
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@require_http_methods(["POST"])
def invite(request):
if not request.user.is_authenticated():
data = {"status": "ERROR", "message": "not authenticated"}
return http.HttpResponseBadRequest(json.dumps(data), content_type="application/json")
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
Update view to use different imports/decorators
1. We can generally expect the json module to be
available now, so no need to use what ships with
django
2. require_POST is just simpler and more direct
3. Using login_required is clearer as well instead
of using custom logic.import json
from django import http
from django.views.decorators.http import require_POST
from account.models import EmailAddress
from django.contrib.auth.decorators import login_required
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@login_required
@require_POST
def invite(request):
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
|
<commit_before>from django import http
from django.utils import simplejson as json
from django.views.decorators.http import require_http_methods
from account.models import EmailAddress
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@require_http_methods(["POST"])
def invite(request):
if not request.user.is_authenticated():
data = {"status": "ERROR", "message": "not authenticated"}
return http.HttpResponseBadRequest(json.dumps(data), content_type="application/json")
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
<commit_msg>Update view to use different imports/decorators
1. We can generally expect the json module to be
available now, so no need to use what ships with
django
2. require_POST is just simpler and more direct
3. Using login_required is clearer as well instead
of using custom logic.<commit_after>import json
from django import http
from django.views.decorators.http import require_POST
from account.models import EmailAddress
from django.contrib.auth.decorators import login_required
from kaleo.forms import InviteForm
from kaleo.models import JoinInvitation
@login_required
@require_POST
def invite(request):
form = InviteForm(request.POST)
if form.is_valid():
email = form.cleaned_data["email_address"]
if EmailAddress.objects.filter(email=email, verified=True).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>%s is already on this site</li></ul>' % email}
elif JoinInvitation.objects.filter(from_user=request.user, signup_code__email=email).exists():
data = {"status": "ERROR", "errors": '<ul class="errorlist"><li>You have already invited %s</li></ul>' % email}
else:
JoinInvitation.invite(request.user, email)
data = {
"status": "OK",
"email": email,
"invitations_remaining": request.user.invitationstat.invites_remaining()
}
else:
data = {"status": "ERROR", "errors": str(form.errors["email_address"])}
return http.HttpResponse(json.dumps(data), content_type="application/json")
|
46ced52179fca8fe5674d1d32cfb895522d75ee6
|
tools/perf/benchmarks/polymer_load.py
|
tools/perf/benchmarks/polymer_load.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
@benchmark.Enabled('android')
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
# There is something weird about this test (or a test that precedes it)
# that causes it to fail in telemetry_perf_unittests when it is not run
# as the first of the benchmark_smoke_unittest test cases.
# See crbug.com/428207.
#@benchmark.Enabled('android')
@benchmark.Disabled
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
|
Disable the polymer.load_topeka perf benchmark
|
Disable the polymer.load_topeka perf benchmark
There's something weird about this test or one of the other
test cases in the benchmark_smoke_unittest test suite that is
causing this test to hang when it is not run first.
I'm disabling this test for now so we can move on with the typ
cutover and get parallel test execution on the other platforms.
R=tonyg@chromium.org, ykyyip@chromium.org, dtu@chromium.org
BUG=428207
Review URL: https://codereview.chromium.org/683393002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301946}
|
Python
|
bsd-3-clause
|
markYoungH/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,ltilve/chromium,Just-D/chromium-1,M4sse/chromium.src,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,dushu1203/chromium.src,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,krieger-od/nwjs_chromium.src,M4sse/chromium.src,jaruba/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,ltilve/chromium,dushu1203/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,axinging/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium
-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,dednal/chromium.src,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,ltilve/chromium,Chilledheart/chromium,markYoungH/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,markYoungH/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,markYoungH/chromium.src,jaruba/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,dednal/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,jaruba/chromium.src,Chilledheart/chromium,markYoungH/chromium.src,dushu1203/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,Chilledheart/chromium,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-cr
osswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,Just-D/chromium-1,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,Jonekee/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,Fireblend/chromium-crosswalk
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
@benchmark.Enabled('android')
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
Disable the polymer.load_topeka perf benchmark
There's something weird about this test or one of the other
test cases in the benchmark_smoke_unittest test suite that is
causing this test to hang when it is not run first.
I'm disabling this test for now so we can move on with the typ
cutover and get parallel test execution on the other platforms.
R=tonyg@chromium.org, ykyyip@chromium.org, dtu@chromium.org
BUG=428207
Review URL: https://codereview.chromium.org/683393002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301946}
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
# There is something weird about this test (or a test that precedes it)
# that causes it to fail in telemetry_perf_unittests when it is not run
# as the first of the benchmark_smoke_unittest test cases.
# See crbug.com/428207.
#@benchmark.Enabled('android')
@benchmark.Disabled
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
@benchmark.Enabled('android')
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
<commit_msg>Disable the polymer.load_topeka perf benchmark
There's something weird about this test or one of the other
test cases in the benchmark_smoke_unittest test suite that is
causing this test to hang when it is not run first.
I'm disabling this test for now so we can move on with the typ
cutover and get parallel test execution on the other platforms.
R=tonyg@chromium.org, ykyyip@chromium.org, dtu@chromium.org
BUG=428207
Review URL: https://codereview.chromium.org/683393002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301946}<commit_after>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
# There is something weird about this test (or a test that precedes it)
# that causes it to fail in telemetry_perf_unittests when it is not run
# as the first of the benchmark_smoke_unittest test cases.
# See crbug.com/428207.
#@benchmark.Enabled('android')
@benchmark.Disabled
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
@benchmark.Enabled('android')
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
Disable the polymer.load_topeka perf benchmark
There's something weird about this test or one of the other
test cases in the benchmark_smoke_unittest test suite that is
causing this test to hang when it is not run first.
I'm disabling this test for now so we can move on with the typ
cutover and get parallel test execution on the other platforms.
R=tonyg@chromium.org, ykyyip@chromium.org, dtu@chromium.org
BUG=428207
Review URL: https://codereview.chromium.org/683393002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301946}# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
# There is something weird about this test (or a test that precedes it)
# that causes it to fail in telemetry_perf_unittests when it is not run
# as the first of the benchmark_smoke_unittest test cases.
# See crbug.com/428207.
#@benchmark.Enabled('android')
@benchmark.Disabled
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
@benchmark.Enabled('android')
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
<commit_msg>Disable the polymer.load_topeka perf benchmark
There's something weird about this test or one of the other
test cases in the benchmark_smoke_unittest test suite that is
causing this test to hang when it is not run first.
I'm disabling this test for now so we can move on with the typ
cutover and get parallel test execution on the other platforms.
R=tonyg@chromium.org, ykyyip@chromium.org, dtu@chromium.org
BUG=428207
Review URL: https://codereview.chromium.org/683393002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#301946}<commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import polymer_load
import page_sets
from telemetry import benchmark
@benchmark.Enabled('android')
class PolymerLoadPica(benchmark.Benchmark):
"""Measures time to polymer-ready for Pica (News Reader)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.PicaPageSet
# There is something weird about this test (or a test that precedes it)
# that causes it to fail in telemetry_perf_unittests when it is not run
# as the first of the benchmark_smoke_unittest test cases.
# See crbug.com/428207.
#@benchmark.Enabled('android')
@benchmark.Disabled
class PolymerLoadTopeka(benchmark.Benchmark):
"""Measures time to polymer-ready for Topeka (Quiz App)."""
test = polymer_load.PolymerLoadMeasurement
page_set = page_sets.TopekaPageSet
|
3139ae7dceb3605e70db2cbcde0d732dcb68bc2a
|
serfnode/handler/config.py
|
serfnode/handler/config.py
|
import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
|
import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
peer = os.environ.get('PEER') or yml.get('peer')
ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
|
Make yaml fields lowercase in serfnode section
|
Make yaml fields lowercase in serfnode section
|
Python
|
mit
|
waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode
|
import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
Make yaml fields lowercase in serfnode section
|
import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
peer = os.environ.get('PEER') or yml.get('peer')
ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
|
<commit_before>import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
<commit_msg>Make yaml fields lowercase in serfnode section<commit_after>
|
import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
peer = os.environ.get('PEER') or yml.get('peer')
ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
|
import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
Make yaml fields lowercase in serfnode sectionimport os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
peer = os.environ.get('PEER') or yml.get('peer')
ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
|
<commit_before>import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
<commit_msg>Make yaml fields lowercase in serfnode section<commit_after>import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
peer = os.environ.get('PEER') or yml.get('peer')
ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
|
2f863726c246982a5ce6f34219b530a7236abcd9
|
server/adventures/tests.py
|
server/adventures/tests.py
|
from django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
|
from django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
class EditionTests(TestCase):
def test_create_author(self):
odandd = Edition.objects.create(name='OD&D')
self.assertEqual(Edition.objects.first(), odandd)
self.assertEqual(Edition.objects.count(), 1)
|
Add Edition model creation test
|
Add Edition model creation test
|
Python
|
mit
|
petertrotman/adventurelookup,petertrotman/adventurelookup,petertrotman/adventurelookup,petertrotman/adventurelookup
|
from django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
Add Edition model creation test
|
from django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
class EditionTests(TestCase):
def test_create_author(self):
odandd = Edition.objects.create(name='OD&D')
self.assertEqual(Edition.objects.first(), odandd)
self.assertEqual(Edition.objects.count(), 1)
|
<commit_before>from django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
<commit_msg>Add Edition model creation test<commit_after>
|
from django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
class EditionTests(TestCase):
def test_create_author(self):
odandd = Edition.objects.create(name='OD&D')
self.assertEqual(Edition.objects.first(), odandd)
self.assertEqual(Edition.objects.count(), 1)
|
from django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
Add Edition model creation testfrom django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
class EditionTests(TestCase):
def test_create_author(self):
odandd = Edition.objects.create(name='OD&D')
self.assertEqual(Edition.objects.first(), odandd)
self.assertEqual(Edition.objects.count(), 1)
|
<commit_before>from django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
<commit_msg>Add Edition model creation test<commit_after>from django.test import TestCase
from .models import Author, Publisher, Edition, Setting, Adventure
class AuthorTests(TestCase):
def test_create_author(self):
gygax = Author.objects.create(name='Gary Gygax')
self.assertEqual(Author.objects.first(), gygax)
self.assertEqual(Author.objects.count(), 1)
class PublisherTests(TestCase):
def test_create_author(self):
wotc = Publisher.objects.create(name='Wizards of the Coast')
self.assertEqual(Publisher.objects.first(), wotc)
self.assertEqual(Publisher.objects.count(), 1)
class EditionTests(TestCase):
def test_create_author(self):
odandd = Edition.objects.create(name='OD&D')
self.assertEqual(Edition.objects.first(), odandd)
self.assertEqual(Edition.objects.count(), 1)
|
5af054dd16b592e925763cac6635fed35457102c
|
clowder/utility/print_utilities.py
|
clowder/utility/print_utilities.py
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
Add extra space between cat emoji and text
|
Add extra space between cat emoji and text
|
Python
|
mit
|
JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
Add extra space between cat emoji and text
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
<commit_before>"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
<commit_msg>Add extra space between cat emoji and text<commit_after>
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
Add extra space between cat emoji and text"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
<commit_before>"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
<commit_msg>Add extra space between cat emoji and text<commit_after>"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(get_cat() + ' ' + project_output)
print(current_ref_output + ' ' + path_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
c501bba28d4a77ba03f6f1277be13913307f04e1
|
clowder/utility/print_utilities.py
|
clowder/utility/print_utilities.py
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
"""Print utilities"""
import os
import emoji
from termcolor import colored, cprint
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
cprint(name, 'green')
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
Print project name even if it doesn't exist on disk
|
Print project name even if it doesn't exist on disk
|
Python
|
mit
|
JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
Print project name even if it doesn't exist on disk
|
"""Print utilities"""
import os
import emoji
from termcolor import colored, cprint
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
cprint(name, 'green')
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
<commit_before>"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
<commit_msg>Print project name even if it doesn't exist on disk<commit_after>
|
"""Print utilities"""
import os
import emoji
from termcolor import colored, cprint
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
cprint(name, 'green')
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
Print project name even if it doesn't exist on disk"""Print utilities"""
import os
import emoji
from termcolor import colored, cprint
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
cprint(name, 'green')
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
<commit_before>"""Print utilities"""
import os
import emoji
from termcolor import colored
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
<commit_msg>Print project name even if it doesn't exist on disk<commit_after>"""Print utilities"""
import os
import emoji
from termcolor import colored, cprint
from clowder.utility.git_utilities import (
git_current_sha,
git_current_branch,
git_is_detached,
git_is_dirty
)
def print_project_status(root_directory, path, name):
"""Print repo status"""
repo_path = os.path.join(root_directory, path)
git_path = os.path.join(repo_path, '.git')
if not os.path.isdir(git_path):
cprint(name, 'green')
return
if git_is_dirty(repo_path):
color = 'red'
symbol = '*'
else:
color = 'green'
symbol = ''
project_output = colored(symbol + name, color)
if git_is_detached(repo_path):
current_ref = git_current_sha(repo_path)
current_ref_output = colored('(HEAD @ ' + current_ref + ')', 'magenta')
else:
current_branch = git_current_branch(repo_path)
current_ref_output = colored('(' + current_branch + ')', 'magenta')
path_output = colored(path, 'cyan')
print(project_output + ' ' + current_ref_output + ' ' + path_output)
def print_group(name):
name_output = colored(name, attrs=['bold'])
print(get_cat() + ' ' + name_output)
def get_cat():
"""Return a cat emoji"""
return emoji.emojize(':cat:', use_aliases=True)
|
8112291023edff1a3803f2a3a404d83e69e1ee34
|
astral/api/tests/__init__.py
|
astral/api/tests/__init__.py
|
import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
#drop_all()
|
import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
drop_all()
|
Drop all tables after tests - looks like we're back in business.
|
Drop all tables after tests - looks like we're back in business.
|
Python
|
mit
|
peplin/astral
|
import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
#drop_all()
Drop all tables after tests - looks like we're back in business.
|
import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
drop_all()
|
<commit_before>import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
#drop_all()
<commit_msg>Drop all tables after tests - looks like we're back in business.<commit_after>
|
import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
drop_all()
|
import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
#drop_all()
Drop all tables after tests - looks like we're back in business.import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
drop_all()
|
<commit_before>import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
#drop_all()
<commit_msg>Drop all tables after tests - looks like we're back in business.<commit_after>import tornado.testing
from astral.api.app import NodeWebAPI
from astral.models import drop_all, setup_all, create_all, session
class BaseTest(tornado.testing.AsyncHTTPTestCase):
def get_app(self):
return NodeWebAPI()
def get_http_port(self):
return 8000
def setUp(self):
super(BaseTest, self).setUp()
setup_all()
create_all()
def tearDown(self):
super(BaseTest, self).tearDown()
session.rollback()
drop_all()
|
16169dbe2fe392197e9a926e572bb3fc704ef2bd
|
generator/test/runner.py
|
generator/test/runner.py
|
#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
runner = TextTestRunner(verbosity=2)
runner.run(suite)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
ret = not runner.run(suite).wasSuccessful()
sys.exit(ret)
if __name__ == '__main__':
main()
|
Exit with 1 when the RPC generator tests fail
|
Exit with 1 when the RPC generator tests fail
|
Python
|
bsd-3-clause
|
smartdevicelink/sdl_android
|
#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
runner = TextTestRunner(verbosity=2)
runner.run(suite)
if __name__ == '__main__':
main()
Exit with 1 when the RPC generator tests fail
|
#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
ret = not runner.run(suite).wasSuccessful()
sys.exit(ret)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
runner = TextTestRunner(verbosity=2)
runner.run(suite)
if __name__ == '__main__':
main()
<commit_msg>Exit with 1 when the RPC generator tests fail<commit_after>
|
#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
ret = not runner.run(suite).wasSuccessful()
sys.exit(ret)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
runner = TextTestRunner(verbosity=2)
runner.run(suite)
if __name__ == '__main__':
main()
Exit with 1 when the RPC generator tests fail#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
ret = not runner.run(suite).wasSuccessful()
sys.exit(ret)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
runner = TextTestRunner(verbosity=2)
runner.run(suite)
if __name__ == '__main__':
main()
<commit_msg>Exit with 1 when the RPC generator tests fail<commit_after>#!/usr/bin/env python3
"""
Main entry point to run all tests
"""
import sys
from pathlib import Path
from unittest import TestLoader, TestSuite, TextTestRunner
PATH = Path(__file__).absolute()
sys.path.append(PATH.parents[1].joinpath('rpc_spec/InterfaceParser').as_posix())
sys.path.append(PATH.parents[1].as_posix())
try:
from test_enums import TestEnumsProducer
from test_functions import TestFunctionsProducer
from test_structs import TestStructsProducer
from test_code_format_and_quality import CodeFormatAndQuality
except ImportError as message:
print('{}. probably you did not initialize submodule'.format(message))
sys.exit(1)
def main():
"""
Main entry point to run all tests
"""
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TestFunctionsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestEnumsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(TestStructsProducer))
suite.addTests(TestLoader().loadTestsFromTestCase(CodeFormatAndQuality))
ret = not runner.run(suite).wasSuccessful()
sys.exit(ret)
if __name__ == '__main__':
main()
|
101499a9bde83340887f8c7bc0254a59b9aa5fc1
|
db/__init__.py
|
db/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(self, db_item):
with session_scope() as session:
session.merge(db_item)
session.commit()
|
Add utility function to commit database item
|
Add utility function to commit database item
|
Python
|
mit
|
leaffan/pynhldb
|
Add utility function to commit database item
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(self, db_item):
with session_scope() as session:
session.merge(db_item)
session.commit()
|
<commit_before><commit_msg>Add utility function to commit database item<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(self, db_item):
with session_scope() as session:
session.merge(db_item)
session.commit()
|
Add utility function to commit database item#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(self, db_item):
with session_scope() as session:
session.merge(db_item)
session.commit()
|
<commit_before><commit_msg>Add utility function to commit database item<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .common import session_scope
def commit_db_item(self, db_item):
with session_scope() as session:
session.merge(db_item)
session.commit()
|
|
2dd484154d25351079da5eaa84cb2d1a0224ce53
|
Instanssi/admin_base/views.py
|
Instanssi/admin_base/views.py
|
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
@login_required(login_url='/control/auth/login/')
def index(request):
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
|
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from Instanssi.admin_base.misc.eventsel import get_selected_event
@login_required(login_url='/control/auth/login/')
def index(request):
# Select latest event as default
print get_selected_event(request)
# Redirect to events page
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
|
Select latest event when logged in.
|
admin_base: Select latest event when logged in.
|
Python
|
mit
|
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
|
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
@login_required(login_url='/control/auth/login/')
def index(request):
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)admin_base: Select latest event when logged in.
|
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from Instanssi.admin_base.misc.eventsel import get_selected_event
@login_required(login_url='/control/auth/login/')
def index(request):
# Select latest event as default
print get_selected_event(request)
# Redirect to events page
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
|
<commit_before># -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
@login_required(login_url='/control/auth/login/')
def index(request):
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)<commit_msg>admin_base: Select latest event when logged in.<commit_after>
|
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from Instanssi.admin_base.misc.eventsel import get_selected_event
@login_required(login_url='/control/auth/login/')
def index(request):
# Select latest event as default
print get_selected_event(request)
# Redirect to events page
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
|
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
@login_required(login_url='/control/auth/login/')
def index(request):
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)admin_base: Select latest event when logged in.# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from Instanssi.admin_base.misc.eventsel import get_selected_event
@login_required(login_url='/control/auth/login/')
def index(request):
# Select latest event as default
print get_selected_event(request)
# Redirect to events page
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
|
<commit_before># -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
@login_required(login_url='/control/auth/login/')
def index(request):
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)<commit_msg>admin_base: Select latest event when logged in.<commit_after># -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from Instanssi.admin_base.misc.eventsel import get_selected_event
@login_required(login_url='/control/auth/login/')
def index(request):
# Select latest event as default
print get_selected_event(request)
# Redirect to events page
return HttpResponseRedirect("/control/events/")
@login_required(login_url='/control/auth/login/')
def eventchange(request, event_id):
# Get redirect path
if 'r' in request.GET:
r = request.GET['r']
if r[0] != "/":
r = "/control/"
else:
r = "/control/"
# Set session variable
try:
request.session['m_event_id'] = int(event_id)
except:
raise Http404
# Redirect
return HttpResponseRedirect(r)
|
c6fb5b4361101148a300049de862b90a7d74c6be
|
base/urls.py
|
base/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
url(r'^', include('components.merchandise.music.urls')),
url(r'^', include('components.people.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
Add people and music to the main URL configuration.
|
Add people and music to the main URL configuration.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
Add people and music to the main URL configuration.
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
url(r'^', include('components.merchandise.music.urls')),
url(r'^', include('components.people.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Add people and music to the main URL configuration.<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
url(r'^', include('components.merchandise.music.urls')),
url(r'^', include('components.people.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
Add people and music to the main URL configuration.from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
url(r'^', include('components.merchandise.music.urls')),
url(r'^', include('components.people.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Add people and music to the main URL configuration.<commit_after>from django.conf.urls import patterns, include, url
from django.contrib import admin
from components.views import SiteView
# Uncomment the next two lines to enable the admin:
# admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', name='site-home', view=SiteView.as_view()),
url(r'^', include('components.appearances.urls')),
url(r'^', include('components.events.urls')),
url(r'^', include('components.merchandise.goods.urls')),
url(r'^', include('components.merchandise.music.urls')),
url(r'^', include('components.people.urls')),
# Examples:
# url(r'^$', 'base.views.home', name='home'),
# url(r'^base/', include('base.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
b74f974777b82582b13a14eff0eac483b1f91386
|
gilt/__init__.py
|
gilt/__init__.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('gilt') # noqa
__version__ = version_info.release_string()
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('python-gilt') # noqa
__version__ = version_info.release_string()
|
Use proper package name for pbr
|
Use proper package name for pbr
|
Python
|
mit
|
metacloud/gilt
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('gilt') # noqa
__version__ = version_info.release_string()
Use proper package name for pbr
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('python-gilt') # noqa
__version__ = version_info.release_string()
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('gilt') # noqa
__version__ = version_info.release_string()
<commit_msg>Use proper package name for pbr<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('python-gilt') # noqa
__version__ = version_info.release_string()
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('gilt') # noqa
__version__ = version_info.release_string()
Use proper package name for pbr# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('python-gilt') # noqa
__version__ = version_info.release_string()
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('gilt') # noqa
__version__ = version_info.release_string()
<commit_msg>Use proper package name for pbr<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import pbr.version
version_info = pbr.version.VersionInfo('python-gilt') # noqa
__version__ = version_info.release_string()
|
a6c991e2519edeb0a644e83b93242c7312b9e700
|
localore/search/views.py
|
localore/search/views.py
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
response['search_results'] = [
dict(
(attr, getattr(result.specific, attr))
for attr in ['title', 'url']
if hasattr(result.specific, attr)
) for result in response['search_results']
]
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
search_results_serializable = []
for res in response['search_results']:
res_serializable = {}
res_serializable['title'] = res.specific.title
res_serializable['url'] = res.specific.url
res_serializable['content_type'] = res.specific.content_type.name
search_results_serializable.append(res_serializable)
response['search_results'] = search_results_serializable
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
|
Add content_type to search results JSON.
|
Add content_type to search results JSON.
|
Python
|
mpl-2.0
|
ghostwords/localore,ghostwords/localore,ghostwords/localore
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
response['search_results'] = [
dict(
(attr, getattr(result.specific, attr))
for attr in ['title', 'url']
if hasattr(result.specific, attr)
) for result in response['search_results']
]
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
Add content_type to search results JSON.
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
search_results_serializable = []
for res in response['search_results']:
res_serializable = {}
res_serializable['title'] = res.specific.title
res_serializable['url'] = res.specific.url
res_serializable['content_type'] = res.specific.content_type.name
search_results_serializable.append(res_serializable)
response['search_results'] = search_results_serializable
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
|
<commit_before>from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
response['search_results'] = [
dict(
(attr, getattr(result.specific, attr))
for attr in ['title', 'url']
if hasattr(result.specific, attr)
) for result in response['search_results']
]
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
<commit_msg>Add content_type to search results JSON.<commit_after>
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
search_results_serializable = []
for res in response['search_results']:
res_serializable = {}
res_serializable['title'] = res.specific.title
res_serializable['url'] = res.specific.url
res_serializable['content_type'] = res.specific.content_type.name
search_results_serializable.append(res_serializable)
response['search_results'] = search_results_serializable
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
response['search_results'] = [
dict(
(attr, getattr(result.specific, attr))
for attr in ['title', 'url']
if hasattr(result.specific, attr)
) for result in response['search_results']
]
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
Add content_type to search results JSON.from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
search_results_serializable = []
for res in response['search_results']:
res_serializable = {}
res_serializable['title'] = res.specific.title
res_serializable['url'] = res.specific.url
res_serializable['content_type'] = res.specific.content_type.name
search_results_serializable.append(res_serializable)
response['search_results'] = search_results_serializable
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
|
<commit_before>from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
response['search_results'] = [
dict(
(attr, getattr(result.specific, attr))
for attr in ['title', 'url']
if hasattr(result.specific, attr)
) for result in response['search_results']
]
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
<commit_msg>Add content_type to search results JSON.<commit_after>from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
do_json = 'json' in request.GET
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
response = {
'search_query': search_query,
'search_results': search_results,
}
if do_json:
search_results_serializable = []
for res in response['search_results']:
res_serializable = {}
res_serializable['title'] = res.specific.title
res_serializable['url'] = res.specific.url
res_serializable['content_type'] = res.specific.content_type.name
search_results_serializable.append(res_serializable)
response['search_results'] = search_results_serializable
return JsonResponse(response)
else:
return render(request, 'search/search.html', response)
|
73457f6c4a0dde5b5eb2c35992cd0f4d221cea06
|
machete/issues/models.py
|
machete/issues/models.py
|
import thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
description = thunderdome.String()
class Severity(BaseVertex):
name = thunderdome.String()
class AssignedTo(BaseEdge):
pass
|
import thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
"""Represents an issue in machete and associated information."""
description = thunderdome.String()
@property
def severity(self):
"""
Returns the severity associated with this issue
:rtype: machete.issues.models.Severity
"""
result = self.outV(Caliber)
# Ensure this invariant holds as each issue should have one severity
assert len(result) <= 1
return result[0]
class Severity(BaseVertex):
"""Indicates the severity of an issue"""
name = thunderdome.String()
@property
def issues(self):
"""
Return a list of issues associated with this severity.
:rtype: list
"""
return self.inV(Caliber)
class Caliber(BaseEdge):
"""Edge connecting an issue to its severity"""
@property
def issue(self):
"""
Return the issue associated with this caliber.
:rtype: machete.issues.models.Issue
"""
return self.outV()
@property
def severity(self):
"""
Return the severity associated with this caliber.
:rtype: machete.issues.models.Severity
"""
return self.inV()
class AssignedTo(BaseEdge):
"""Edge associating an issue with a particular user or users"""
pass
|
Add Properties and Caliber Relationship Between Issue and Severity
|
Add Properties and Caliber Relationship Between Issue and Severity
|
Python
|
bsd-3-clause
|
rustyrazorblade/machete,rustyrazorblade/machete,rustyrazorblade/machete
|
import thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
description = thunderdome.String()
class Severity(BaseVertex):
name = thunderdome.String()
class AssignedTo(BaseEdge):
pass
Add Properties and Caliber Relationship Between Issue and Severity
|
import thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
"""Represents an issue in machete and associated information."""
description = thunderdome.String()
@property
def severity(self):
"""
Returns the severity associated with this issue
:rtype: machete.issues.models.Severity
"""
result = self.outV(Caliber)
# Ensure this invariant holds as each issue should have one severity
assert len(result) <= 1
return result[0]
class Severity(BaseVertex):
"""Indicates the severity of an issue"""
name = thunderdome.String()
@property
def issues(self):
"""
Return a list of issues associated with this severity.
:rtype: list
"""
return self.inV(Caliber)
class Caliber(BaseEdge):
"""Edge connecting an issue to its severity"""
@property
def issue(self):
"""
Return the issue associated with this caliber.
:rtype: machete.issues.models.Issue
"""
return self.outV()
@property
def severity(self):
"""
Return the severity associated with this caliber.
:rtype: machete.issues.models.Severity
"""
return self.inV()
class AssignedTo(BaseEdge):
"""Edge associating an issue with a particular user or users"""
pass
|
<commit_before>
import thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
description = thunderdome.String()
class Severity(BaseVertex):
name = thunderdome.String()
class AssignedTo(BaseEdge):
pass
<commit_msg>Add Properties and Caliber Relationship Between Issue and Severity<commit_after>
|
import thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
"""Represents an issue in machete and associated information."""
description = thunderdome.String()
@property
def severity(self):
"""
Returns the severity associated with this issue
:rtype: machete.issues.models.Severity
"""
result = self.outV(Caliber)
# Ensure this invariant holds as each issue should have one severity
assert len(result) <= 1
return result[0]
class Severity(BaseVertex):
"""Indicates the severity of an issue"""
name = thunderdome.String()
@property
def issues(self):
"""
Return a list of issues associated with this severity.
:rtype: list
"""
return self.inV(Caliber)
class Caliber(BaseEdge):
"""Edge connecting an issue to its severity"""
@property
def issue(self):
"""
Return the issue associated with this caliber.
:rtype: machete.issues.models.Issue
"""
return self.outV()
@property
def severity(self):
"""
Return the severity associated with this caliber.
:rtype: machete.issues.models.Severity
"""
return self.inV()
class AssignedTo(BaseEdge):
"""Edge associating an issue with a particular user or users"""
pass
|
import thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
description = thunderdome.String()
class Severity(BaseVertex):
name = thunderdome.String()
class AssignedTo(BaseEdge):
pass
Add Properties and Caliber Relationship Between Issue and Severityimport thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
"""Represents an issue in machete and associated information."""
description = thunderdome.String()
@property
def severity(self):
"""
Returns the severity associated with this issue
:rtype: machete.issues.models.Severity
"""
result = self.outV(Caliber)
# Ensure this invariant holds as each issue should have one severity
assert len(result) <= 1
return result[0]
class Severity(BaseVertex):
"""Indicates the severity of an issue"""
name = thunderdome.String()
@property
def issues(self):
"""
Return a list of issues associated with this severity.
:rtype: list
"""
return self.inV(Caliber)
class Caliber(BaseEdge):
"""Edge connecting an issue to its severity"""
@property
def issue(self):
"""
Return the issue associated with this caliber.
:rtype: machete.issues.models.Issue
"""
return self.outV()
@property
def severity(self):
"""
Return the severity associated with this caliber.
:rtype: machete.issues.models.Severity
"""
return self.inV()
class AssignedTo(BaseEdge):
"""Edge associating an issue with a particular user or users"""
pass
|
<commit_before>
import thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
description = thunderdome.String()
class Severity(BaseVertex):
name = thunderdome.String()
class AssignedTo(BaseEdge):
pass
<commit_msg>Add Properties and Caliber Relationship Between Issue and Severity<commit_after>import thunderdome
from machete.base import BaseVertex, BaseEdge
class Issue(BaseVertex):
"""Represents an issue in machete and associated information."""
description = thunderdome.String()
@property
def severity(self):
"""
Returns the severity associated with this issue
:rtype: machete.issues.models.Severity
"""
result = self.outV(Caliber)
# Ensure this invariant holds as each issue should have one severity
assert len(result) <= 1
return result[0]
class Severity(BaseVertex):
"""Indicates the severity of an issue"""
name = thunderdome.String()
@property
def issues(self):
"""
Return a list of issues associated with this severity.
:rtype: list
"""
return self.inV(Caliber)
class Caliber(BaseEdge):
"""Edge connecting an issue to its severity"""
@property
def issue(self):
"""
Return the issue associated with this caliber.
:rtype: machete.issues.models.Issue
"""
return self.outV()
@property
def severity(self):
"""
Return the severity associated with this caliber.
:rtype: machete.issues.models.Severity
"""
return self.inV()
class AssignedTo(BaseEdge):
"""Edge associating an issue with a particular user or users"""
pass
|
5815a61d01cd4c8ca3087f826f5a12cb8738a423
|
berth/cli.py
|
berth/cli.py
|
"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) and \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
|
"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) or \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
|
Make sure we have both images and not just one of them before building
|
Make sure we have both images and not just one of them before building
|
Python
|
mit
|
FalconSocial/berth
|
"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) and \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
Make sure we have both images and not just one of them before building
|
"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) or \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
|
<commit_before>"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) and \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
<commit_msg>Make sure we have both images and not just one of them before building<commit_after>
|
"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) or \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
|
"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) and \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
Make sure we have both images and not just one of them before building"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) or \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
|
<commit_before>"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) and \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
<commit_msg>Make sure we have both images and not just one of them before building<commit_after>"""Handles all command line actions for Berth."""
import berth.build as build
import berth.config as config
import berth.utils as utils
import click
@click.command(help='Berth use Docker containers to build packages for you, based on a YAML configuration file.')
@click.pass_context
@click.version_option(prog_name='Berth')
@click.argument('config_file', metavar='<CONFIG FILE>', type=click.File())
@click.option('-v', '--verbose', is_flag=True, help='Turn on verbose output.')
@click.option('-d', '--debug', is_flag=True, help='Turn on debug output.')
def main(context, config_file, verbose, debug):
"""Build a package."""
if debug:
utils.set_log_level(2)
elif verbose:
utils.set_log_level(1)
configuration = config.read(config_file)
if not configuration:
context.exit(1)
if not config.verify(configuration):
context.exit(1)
if not utils.pull_image(configuration['build']['image']) or \
not utils.pull_image(configuration['package'].get('image', 'dockerfile/fpm')):
context.exit(1)
build.build(configuration)
context.exit(0)
if __name__ == '__main__':
main() # pylint: disable = no-value-for-parameter
|
e22aee1c56289a81ca3d4b5fdf0f97cc8235d870
|
twitter_feed/templatetags/twitter_tags.py
|
twitter_feed/templatetags/twitter_tags.py
|
from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
|
from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError, TypeError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
|
Handle TypeError for python 3.4.
|
Handle TypeError for python 3.4.
|
Python
|
mit
|
CIGIHub/wagtail-twitter-feed
|
from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
Handle TypeError for python 3.4.
|
from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError, TypeError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
|
<commit_before>from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
<commit_msg>Handle TypeError for python 3.4.<commit_after>
|
from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError, TypeError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
|
from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
Handle TypeError for python 3.4.from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError, TypeError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
|
<commit_before>from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
<commit_msg>Handle TypeError for python 3.4.<commit_after>from django import template
from twitter.models import Tweet
register = template.Library()
@register.assignment_tag
def latest_tweets(number_of_tweets=2):
try:
tweets = Tweet.objects.filter(
user__active=True).order_by('-time')[:number_of_tweets]
except (ValueError, AssertionError, TypeError):
raise template.TemplateSyntaxError(
"Tag latest_tweets requires a single positive integer argument, given %r."
% number_of_tweets)
return tweets
|
478afbc2178850d209d5d5d4c0626581b601f208
|
cutepaste/tests/conftest.py
|
cutepaste/tests/conftest.py
|
import os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
|
import os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
driver.implicitly_wait(1)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
|
Add an implicit wait of 1 second
|
Add an implicit wait of 1 second
|
Python
|
apache-2.0
|
msurdi/cutepaste,msurdi/cutepaste,msurdi/cutepaste
|
import os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
Add an implicit wait of 1 second
|
import os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
driver.implicitly_wait(1)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
|
<commit_before>import os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
<commit_msg>Add an implicit wait of 1 second<commit_after>
|
import os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
driver.implicitly_wait(1)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
|
import os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
Add an implicit wait of 1 secondimport os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
driver.implicitly_wait(1)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
|
<commit_before>import os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
<commit_msg>Add an implicit wait of 1 second<commit_after>import os
import pytest
from pyvirtualdisplay import Display
from selenium.webdriver import Chrome
from selenium.webdriver.chrome.options import Options
REPORTS_DIR = "reports"
@pytest.fixture(scope='function')
def webdriver(request):
display = Display(visible=0, size=(800, 600), use_xauth=True)
display.start()
options = Options()
options.add_argument("--no-sandbox")
driver = Chrome(chrome_options=options)
driver.implicitly_wait(1)
prev_failed_tests = request.session.testsfailed
yield driver
if prev_failed_tests != request.session.testsfailed:
try:
os.makedirs(REPORTS_DIR)
except os.error:
pass
test_name = request.function.__module__ + "." + request.function.__name__
driver.save_screenshot(f"reports/{test_name}.png")
with open(f"reports/{test_name}.html", "w") as f:
f.write(driver.page_source)
driver.quit()
display.stop()
|
8c3782e676e27bf6b3512ea390ad789698ba331c
|
memegen/routes/_cache.py
|
memegen/routes/_cache.py
|
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom':
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom' or kwargs in self.items:
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
Disable caching of identical images
|
Disable caching of identical images
|
Python
|
mit
|
DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen
|
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom':
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
Disable caching of identical images
|
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom' or kwargs in self.items:
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
<commit_before>import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom':
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
<commit_msg>Disable caching of identical images<commit_after>
|
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom' or kwargs in self.items:
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom':
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
Disable caching of identical imagesimport logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom' or kwargs in self.items:
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
<commit_before>import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom':
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
<commit_msg>Disable caching of identical images<commit_after>import logging
import yorm
from yorm.types import List, Object
log = logging.getLogger(__name__)
@yorm.attr(items=List.of_type(Object))
@yorm.sync("data/images/cache.yml")
class Cache:
SIZE = 9
def __init__(self):
self.items = []
def add(self, **kwargs):
if kwargs['key'] == 'custom' or kwargs in self.items:
return
log.info("Caching: %s", kwargs)
self.items.insert(0, kwargs)
while len(self.items) > self.SIZE:
self.items.pop()
yorm.save(self)
def get(self, index):
log.info("Getting cache index: %s", index)
try:
data = self.items[index]
except IndexError:
data = {}
log.info("Retrieved cache: %s", data)
return data
|
839bd6c7d9f4247d4717fb97a3d18d480dc678f4
|
chemfiles/find_chemfiles.py
|
chemfiles/find_chemfiles.py
|
# -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''
Load chemfiles C++ library, and set the environment as needed.
'''
os.environ['CHEMFILES_PLUGINS'] = os.path.join(ROOT, "molfiles")
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
# We could not find chemfiles ...
raise ImportError("Could not find the chemfiles library. " +
"Are you sure it is installed?")
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
|
# -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''Load chemfiles C++ library'''
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
raise ImportError(
"Could not find chemfiles library. Are you sure it's installed?"
)
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
|
Remove all reference to molfiles plugins
|
Remove all reference to molfiles plugins
|
Python
|
mpl-2.0
|
Luthaf/Chemharp-python
|
# -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''
Load chemfiles C++ library, and set the environment as needed.
'''
os.environ['CHEMFILES_PLUGINS'] = os.path.join(ROOT, "molfiles")
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
# We could not find chemfiles ...
raise ImportError("Could not find the chemfiles library. " +
"Are you sure it is installed?")
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
Remove all reference to molfiles plugins
|
# -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''Load chemfiles C++ library'''
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
raise ImportError(
"Could not find chemfiles library. Are you sure it's installed?"
)
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
|
<commit_before># -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''
Load chemfiles C++ library, and set the environment as needed.
'''
os.environ['CHEMFILES_PLUGINS'] = os.path.join(ROOT, "molfiles")
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
# We could not find chemfiles ...
raise ImportError("Could not find the chemfiles library. " +
"Are you sure it is installed?")
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
<commit_msg>Remove all reference to molfiles plugins<commit_after>
|
# -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''Load chemfiles C++ library'''
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
raise ImportError(
"Could not find chemfiles library. Are you sure it's installed?"
)
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
|
# -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''
Load chemfiles C++ library, and set the environment as needed.
'''
os.environ['CHEMFILES_PLUGINS'] = os.path.join(ROOT, "molfiles")
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
# We could not find chemfiles ...
raise ImportError("Could not find the chemfiles library. " +
"Are you sure it is installed?")
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
Remove all reference to molfiles plugins# -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''Load chemfiles C++ library'''
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
raise ImportError(
"Could not find chemfiles library. Are you sure it's installed?"
)
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
|
<commit_before># -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''
Load chemfiles C++ library, and set the environment as needed.
'''
os.environ['CHEMFILES_PLUGINS'] = os.path.join(ROOT, "molfiles")
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
# We could not find chemfiles ...
raise ImportError("Could not find the chemfiles library. " +
"Are you sure it is installed?")
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
<commit_msg>Remove all reference to molfiles plugins<commit_after># -* coding: utf-8 -*
import os
from ctypes import cdll
from ctypes.util import find_library
from chemfiles import ffi
ROOT = os.path.dirname(__file__)
def load_clib():
'''Load chemfiles C++ library'''
libpath = find_library("chemfiles")
if not libpath:
# Rely on the library built by the setup.py function
libpath = os.path.join(ROOT, "_chemfiles.so")
try:
return cdll.LoadLibrary(libpath)
except OSError:
raise ImportError(
"Could not find chemfiles library. Are you sure it's installed?"
)
class ChemfilesLibrary(object):
def __init__(self):
self._cache = None
def __call__(self):
if self._cache is None:
self._cache = load_clib()
ffi.set_interface(self._cache)
return self._cache
|
2c0407d85c54c64d4b619bbae5add1f1da1574d0
|
scripts/merge_benchmarks.py
|
scripts/merge_benchmarks.py
|
import pandas as pd
import argparse
import os
if __name__=='__main__':
"""
Merge benchmark output for all scenarios, methods and settings
"""
parser = argparse.ArgumentParser(description='Collect all benchmarks')
parser.add_argument('-o', '--output', required=True, help='output file')
parser.add_argument('-r', '--root', required=True,
help='root directory for scIB output')
args = parser.parse_args()
bench_files = []
for path, dirs, files in os.walk(args.root):
for file in files:
if 'integration' in path and file.endswith('.benchmark'):
bench_files.append(os.path.join(path, file))
res_list = []
for file in bench_files:
clean_name = file.replace(args.root, "").replace(".benchmark", "")
res = pd.read_csv(file, sep='\t')
res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
res['scenario'] = clean_name
res.set_index('scenario', inplace=True)
res_list.append(res)
results = pd.concat(res_list)
results.to_csv(args.output, index_label='scenario')
|
import pandas as pd
import argparse
import os
if __name__=='__main__':
"""
Merge benchmark output for all scenarios, methods and settings
"""
parser = argparse.ArgumentParser(description='Collect all benchmarks')
parser.add_argument('-o', '--output', required=True, help='output file')
parser.add_argument('-r', '--root', required=True,
help='root directory for scIB output')
args = parser.parse_args()
bench_files = []
for path, dirs, files in os.walk(args.root):
for file in files:
if 'integration' in path and file.endswith('.benchmark'):
bench_files.append(os.path.join(path, file))
res_list = []
for file in bench_files:
if os.stat(file).st_size == 0:
print(f'{file} is empty and will be skipped')
continue
clean_name = file.replace(args.root, "").replace(".benchmark", "")
res = pd.read_csv(file, sep='\t')
res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
res['scenario'] = clean_name
res.set_index('scenario', inplace=True)
res_list.append(res)
results = pd.concat(res_list)
results.to_csv(args.output, index_label='scenario')
|
Check for empty files when merging benchmarks
|
Check for empty files when merging benchmarks
|
Python
|
mit
|
theislab/scib,theislab/scib
|
import pandas as pd
import argparse
import os
if __name__=='__main__':
"""
Merge benchmark output for all scenarios, methods and settings
"""
parser = argparse.ArgumentParser(description='Collect all benchmarks')
parser.add_argument('-o', '--output', required=True, help='output file')
parser.add_argument('-r', '--root', required=True,
help='root directory for scIB output')
args = parser.parse_args()
bench_files = []
for path, dirs, files in os.walk(args.root):
for file in files:
if 'integration' in path and file.endswith('.benchmark'):
bench_files.append(os.path.join(path, file))
res_list = []
for file in bench_files:
clean_name = file.replace(args.root, "").replace(".benchmark", "")
res = pd.read_csv(file, sep='\t')
res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
res['scenario'] = clean_name
res.set_index('scenario', inplace=True)
res_list.append(res)
results = pd.concat(res_list)
results.to_csv(args.output, index_label='scenario')
Check for empty files when merging benchmarks
|
import pandas as pd
import argparse
import os
if __name__=='__main__':
    """
    Merge benchmark output for all scenarios, methods and settings
    """
    parser = argparse.ArgumentParser(description='Collect all benchmarks')
    parser.add_argument('-o', '--output', required=True, help='output file')
    parser.add_argument('-r', '--root', required=True,
                        help='root directory for scIB output')
    args = parser.parse_args()
    # Collect every *.benchmark file that lives under an 'integration' path.
    bench_files = []
    for path, dirs, files in os.walk(args.root):
        for file in files:
            if 'integration' in path and file.endswith('.benchmark'):
                bench_files.append(os.path.join(path, file))
    res_list = []
    for file in bench_files:
        # Skip zero-byte files: pd.read_csv would raise EmptyDataError.
        if os.stat(file).st_size == 0:
            print(f'{file} is empty and will be skipped')
            continue
        # Scenario name = path relative to the root, minus the extension.
        clean_name = file.replace(args.root, "").replace(".benchmark", "")
        res = pd.read_csv(file, sep='\t')
        # The second column holds the elapsed wall-clock time (h:m:s).
        res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
        res['scenario'] = clean_name
        res.set_index('scenario', inplace=True)
        res_list.append(res)
    results = pd.concat(res_list)
    results.to_csv(args.output, index_label='scenario')
|
<commit_before>import pandas as pd
import argparse
import os
if __name__=='__main__':
"""
Merge benchmark output for all scenarios, methods and settings
"""
parser = argparse.ArgumentParser(description='Collect all benchmarks')
parser.add_argument('-o', '--output', required=True, help='output file')
parser.add_argument('-r', '--root', required=True,
help='root directory for scIB output')
args = parser.parse_args()
bench_files = []
for path, dirs, files in os.walk(args.root):
for file in files:
if 'integration' in path and file.endswith('.benchmark'):
bench_files.append(os.path.join(path, file))
res_list = []
for file in bench_files:
clean_name = file.replace(args.root, "").replace(".benchmark", "")
res = pd.read_csv(file, sep='\t')
res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
res['scenario'] = clean_name
res.set_index('scenario', inplace=True)
res_list.append(res)
results = pd.concat(res_list)
results.to_csv(args.output, index_label='scenario')
<commit_msg>Check for empty files when merging benchmarks<commit_after>
|
import pandas as pd
import argparse
import os
if __name__=='__main__':
"""
Merge benchmark output for all scenarios, methods and settings
"""
parser = argparse.ArgumentParser(description='Collect all benchmarks')
parser.add_argument('-o', '--output', required=True, help='output file')
parser.add_argument('-r', '--root', required=True,
help='root directory for scIB output')
args = parser.parse_args()
bench_files = []
for path, dirs, files in os.walk(args.root):
for file in files:
if 'integration' in path and file.endswith('.benchmark'):
bench_files.append(os.path.join(path, file))
res_list = []
for file in bench_files:
if os.stat(file).st_size == 0:
print(f'{file} is empty and will be skipped')
continue
clean_name = file.replace(args.root, "").replace(".benchmark", "")
res = pd.read_csv(file, sep='\t')
res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
res['scenario'] = clean_name
res.set_index('scenario', inplace=True)
res_list.append(res)
results = pd.concat(res_list)
results.to_csv(args.output, index_label='scenario')
|
import pandas as pd
import argparse
import os
if __name__=='__main__':
"""
Merge benchmark output for all scenarios, methods and settings
"""
parser = argparse.ArgumentParser(description='Collect all benchmarks')
parser.add_argument('-o', '--output', required=True, help='output file')
parser.add_argument('-r', '--root', required=True,
help='root directory for scIB output')
args = parser.parse_args()
bench_files = []
for path, dirs, files in os.walk(args.root):
for file in files:
if 'integration' in path and file.endswith('.benchmark'):
bench_files.append(os.path.join(path, file))
res_list = []
for file in bench_files:
clean_name = file.replace(args.root, "").replace(".benchmark", "")
res = pd.read_csv(file, sep='\t')
res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
res['scenario'] = clean_name
res.set_index('scenario', inplace=True)
res_list.append(res)
results = pd.concat(res_list)
results.to_csv(args.output, index_label='scenario')
Check for empty files when merging benchmarksimport pandas as pd
import argparse
import os
if __name__=='__main__':
"""
Merge benchmark output for all scenarios, methods and settings
"""
parser = argparse.ArgumentParser(description='Collect all benchmarks')
parser.add_argument('-o', '--output', required=True, help='output file')
parser.add_argument('-r', '--root', required=True,
help='root directory for scIB output')
args = parser.parse_args()
bench_files = []
for path, dirs, files in os.walk(args.root):
for file in files:
if 'integration' in path and file.endswith('.benchmark'):
bench_files.append(os.path.join(path, file))
res_list = []
for file in bench_files:
if os.stat(file).st_size == 0:
print(f'{file} is empty and will be skipped')
continue
clean_name = file.replace(args.root, "").replace(".benchmark", "")
res = pd.read_csv(file, sep='\t')
res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
res['scenario'] = clean_name
res.set_index('scenario', inplace=True)
res_list.append(res)
results = pd.concat(res_list)
results.to_csv(args.output, index_label='scenario')
|
<commit_before>import pandas as pd
import argparse
import os
if __name__=='__main__':
"""
Merge benchmark output for all scenarios, methods and settings
"""
parser = argparse.ArgumentParser(description='Collect all benchmarks')
parser.add_argument('-o', '--output', required=True, help='output file')
parser.add_argument('-r', '--root', required=True,
help='root directory for scIB output')
args = parser.parse_args()
bench_files = []
for path, dirs, files in os.walk(args.root):
for file in files:
if 'integration' in path and file.endswith('.benchmark'):
bench_files.append(os.path.join(path, file))
res_list = []
for file in bench_files:
clean_name = file.replace(args.root, "").replace(".benchmark", "")
res = pd.read_csv(file, sep='\t')
res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
res['scenario'] = clean_name
res.set_index('scenario', inplace=True)
res_list.append(res)
results = pd.concat(res_list)
results.to_csv(args.output, index_label='scenario')
<commit_msg>Check for empty files when merging benchmarks<commit_after>import pandas as pd
import argparse
import os
if __name__=='__main__':
"""
Merge benchmark output for all scenarios, methods and settings
"""
parser = argparse.ArgumentParser(description='Collect all benchmarks')
parser.add_argument('-o', '--output', required=True, help='output file')
parser.add_argument('-r', '--root', required=True,
help='root directory for scIB output')
args = parser.parse_args()
bench_files = []
for path, dirs, files in os.walk(args.root):
for file in files:
if 'integration' in path and file.endswith('.benchmark'):
bench_files.append(os.path.join(path, file))
res_list = []
for file in bench_files:
if os.stat(file).st_size == 0:
print(f'{file} is empty and will be skipped')
continue
clean_name = file.replace(args.root, "").replace(".benchmark", "")
res = pd.read_csv(file, sep='\t')
res.rename(columns={res.columns[1]: 'h_m_s'}, inplace=True)
res['scenario'] = clean_name
res.set_index('scenario', inplace=True)
res_list.append(res)
results = pd.concat(res_list)
results.to_csv(args.output, index_label='scenario')
|
c1cfef3cd92b60c3f8db2e5aae8a57c201dd27c7
|
main.py
|
main.py
|
# -*- coding: utf-8 -*-
# traitement global des fichiers wav
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
if not os.path.exists('data'):
os.makedirs('data')
print "Please add some data, I don't work for free"
else:
res = []
for root, dirs, files in os.walk('data'):
print root,dirs,files
for file in files:
if file.endswith(".wav"):
print "treating file "+file
name=os.path.splitext(file)[0]
fileName = os.path.join(root, name)
wavName = fileName+'.wav'
matName = fileName+'.mat'
#print string.join(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
subprocess.Popen(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
# triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
# res.append(triplet)
return res
createDataFiles()
|
# -*- coding: utf-8 -*-
# Global processing of the wav files.
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
    # Walk the data/ tree and run the Octave 'cepstraux' routine on every
    # .wav file, writing a .mat file of coefficients next to it, then read
    # the (mu, sig, pi) triplet back from each .mat file.
    # Returns the list of triplets (None when data/ had to be created).
    if not os.path.exists('data'):
        os.makedirs('data')
        print "Please add some data, I don't work for free"
    else:
        res = []
        for root, dirs, files in os.walk('data'):
            print root,dirs,files
            for file in files:
                if file.endswith(".wav"):
                    print "treating file "+file
                    name=os.path.splitext(file)[0]
                    fileName = os.path.join(root, name)
                    wavName = fileName+'.wav'
                    matName = fileName+'.mat'
                    # call() blocks until Octave exits, so the .mat file
                    # exists before retrieve() reads it.
                    subprocess.call(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
                    triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
                    res.append(triplet)
        return res
res = createDataFiles()
print res
|
Call au lieu de Popen pour synchroniser
|
Call au lieu de Popen pour synchroniser
|
Python
|
mit
|
tomsib2001/speaker-recognition,tomsib2001/speaker-recognition
|
# -*- coding: utf-8 -*-
# traitement global des fichiers wav
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
if not os.path.exists('data'):
os.makedirs('data')
print "Please add some data, I don't work for free"
else:
res = []
for root, dirs, files in os.walk('data'):
print root,dirs,files
for file in files:
if file.endswith(".wav"):
print "treating file "+file
name=os.path.splitext(file)[0]
fileName = os.path.join(root, name)
wavName = fileName+'.wav'
matName = fileName+'.mat'
#print string.join(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
subprocess.Popen(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
# triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
# res.append(triplet)
return res
createDataFiles()
Call au lieu de Popen pour synchroniser
|
# -*- coding: utf-8 -*-
# traitement global des fichiers wav
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
if not os.path.exists('data'):
os.makedirs('data')
print "Please add some data, I don't work for free"
else:
res = []
for root, dirs, files in os.walk('data'):
print root,dirs,files
for file in files:
if file.endswith(".wav"):
print "treating file "+file
name=os.path.splitext(file)[0]
fileName = os.path.join(root, name)
wavName = fileName+'.wav'
matName = fileName+'.mat'
#print string.join(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
subprocess.call(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
res.append(triplet)
return res
res = createDataFiles()
print res
|
<commit_before># -*- coding: utf-8 -*-
# traitement global des fichiers wav
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
if not os.path.exists('data'):
os.makedirs('data')
print "Please add some data, I don't work for free"
else:
res = []
for root, dirs, files in os.walk('data'):
print root,dirs,files
for file in files:
if file.endswith(".wav"):
print "treating file "+file
name=os.path.splitext(file)[0]
fileName = os.path.join(root, name)
wavName = fileName+'.wav'
matName = fileName+'.mat'
#print string.join(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
subprocess.Popen(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
# triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
# res.append(triplet)
return res
createDataFiles()
<commit_msg>Call au lieu de Popen pour synchroniser<commit_after>
|
# -*- coding: utf-8 -*-
# traitement global des fichiers wav
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
if not os.path.exists('data'):
os.makedirs('data')
print "Please add some data, I don't work for free"
else:
res = []
for root, dirs, files in os.walk('data'):
print root,dirs,files
for file in files:
if file.endswith(".wav"):
print "treating file "+file
name=os.path.splitext(file)[0]
fileName = os.path.join(root, name)
wavName = fileName+'.wav'
matName = fileName+'.mat'
#print string.join(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
subprocess.call(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
res.append(triplet)
return res
res = createDataFiles()
print res
|
# -*- coding: utf-8 -*-
# traitement global des fichiers wav
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
if not os.path.exists('data'):
os.makedirs('data')
print "Please add some data, I don't work for free"
else:
res = []
for root, dirs, files in os.walk('data'):
print root,dirs,files
for file in files:
if file.endswith(".wav"):
print "treating file "+file
name=os.path.splitext(file)[0]
fileName = os.path.join(root, name)
wavName = fileName+'.wav'
matName = fileName+'.mat'
#print string.join(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
subprocess.Popen(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
# triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
# res.append(triplet)
return res
createDataFiles()
Call au lieu de Popen pour synchroniser# -*- coding: utf-8 -*-
# traitement global des fichiers wav
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
if not os.path.exists('data'):
os.makedirs('data')
print "Please add some data, I don't work for free"
else:
res = []
for root, dirs, files in os.walk('data'):
print root,dirs,files
for file in files:
if file.endswith(".wav"):
print "treating file "+file
name=os.path.splitext(file)[0]
fileName = os.path.join(root, name)
wavName = fileName+'.wav'
matName = fileName+'.mat'
#print string.join(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
subprocess.call(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
res.append(triplet)
return res
res = createDataFiles()
print res
|
<commit_before># -*- coding: utf-8 -*-
# traitement global des fichiers wav
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
if not os.path.exists('data'):
os.makedirs('data')
print "Please add some data, I don't work for free"
else:
res = []
for root, dirs, files in os.walk('data'):
print root,dirs,files
for file in files:
if file.endswith(".wav"):
print "treating file "+file
name=os.path.splitext(file)[0]
fileName = os.path.join(root, name)
wavName = fileName+'.wav'
matName = fileName+'.mat'
#print string.join(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
subprocess.Popen(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
# triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
# res.append(triplet)
return res
createDataFiles()
<commit_msg>Call au lieu de Popen pour synchroniser<commit_after># -*- coding: utf-8 -*-
# traitement global des fichiers wav
import os,numpy,octaveIO,string,subprocess
def createDataFiles():
if not os.path.exists('data'):
os.makedirs('data')
print "Please add some data, I don't work for free"
else:
res = []
for root, dirs, files in os.walk('data'):
print root,dirs,files
for file in files:
if file.endswith(".wav"):
print "treating file "+file
name=os.path.splitext(file)[0]
fileName = os.path.join(root, name)
wavName = fileName+'.wav'
matName = fileName+'.mat'
#print string.join(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
subprocess.call(['octave','--eval','cepstraux('+'\''+wavName+'\',\''+matName+'\')'])
triplet=octaveIO.retrieve(matName,['mu','sig','pi'])
res.append(triplet)
return res
res = createDataFiles()
print res
|
5549af8fd6213fbe849e3d2578290bc8616360ab
|
standup/wsgi.py
|
standup/wsgi.py
|
"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
|
"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.cache.backends.memcached import BaseMemcachedCache
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
# Fix django closing connection to MemCachier after every request (#11331)
# per the MemCachier docs.
BaseMemcachedCache.close = lambda self, **kwargs: None
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
|
Add monkeypatch to prevent connection closing for memcachier
|
Add monkeypatch to prevent connection closing for memcachier
|
Python
|
bsd-3-clause
|
mozilla/standup,mozilla/standup,mozilla/standup,mozilla/standup
|
"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
Add monkeypatch to prevent connection closing for memcachier
|
"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.cache.backends.memcached import BaseMemcachedCache
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
# Fix django closing connection to MemCachier after every request (#11331)
# per the MemCachier docs.
BaseMemcachedCache.close = lambda self, **kwargs: None
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
|
<commit_before>"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
<commit_msg>Add monkeypatch to prevent connection closing for memcachier<commit_after>
|
"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.cache.backends.memcached import BaseMemcachedCache
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
# Fix django closing connection to MemCachier after every request (#11331)
# per the MemCachier docs.
BaseMemcachedCache.close = lambda self, **kwargs: None
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
|
"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
Add monkeypatch to prevent connection closing for memcachier"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.cache.backends.memcached import BaseMemcachedCache
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
# Fix django closing connection to MemCachier after every request (#11331)
# per the MemCachier docs.
BaseMemcachedCache.close = lambda self, **kwargs: None
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
|
<commit_before>"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
<commit_msg>Add monkeypatch to prevent connection closing for memcachier<commit_after>"""
WSGI config for standup project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.cache.backends.memcached import BaseMemcachedCache
from django.core.wsgi import get_wsgi_application
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
# Fix django closing connection to MemCachier after every request (#11331)
# per the MemCachier docs.
BaseMemcachedCache.close = lambda self, **kwargs: None
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "standup.settings")
application = Sentry(get_wsgi_application())
|
4313578e48f0cae111a73c9cdabc29b12c423856
|
tests/test_client.py
|
tests/test_client.py
|
import unittest
from opendiamond.client.rpc import ControlConnection, BlastConnection
# Diamond server used for the integration test -- TODO confirm reachable.
TEST_HOST = '128.2.209.111'
class ClientRPC(unittest.TestCase):
    def test_connection_pair_nonce(self):
        """Control connection yields a nonce the blast connection accepts."""
        control = ControlConnection()
        blast = BlastConnection()
        nonce = control.connect(TEST_HOST)
        blast.connect(TEST_HOST, nonce)
if __name__ == '__main__':
    unittest.main()
|
import logging
import os
from opendiamond.client.search import DiamondSearch
from opendiamond.client.rpc import ControlConnection, BlastConnection
from opendiamond.client.util import get_default_rgb_filter, get_default_scopecookies
import unittest
# Log everything to stderr so interactive runs show search progress.
_log = logging.getLogger()
_log.setLevel(logging.DEBUG)
_log.addHandler(logging.StreamHandler())
# Diamond server used for the integration tests -- TODO confirm reachable.
TEST_HOST = 'cloudlet013.elijah.cs.cmu.edu'
class TestClientRPC(unittest.TestCase):
    def test_connection_pair_nonce(self):
        """Control connection yields a nonce the blast connection accepts."""
        control = ControlConnection()
        blast = BlastConnection()
        nonce = control.connect(TEST_HOST)
        blast.connect(TEST_HOST, nonce)
class TestClientUtil(unittest.TestCase):
    def test_get_default_rgb_filter(self):
        """The bundled RGB filter loads and reports its name as 'RGB'."""
        rgb_filter = get_default_rgb_filter()
        self.assertIsNotNone(rgb_filter)
        self.assertEqual(rgb_filter.name, 'RGB')
        self.assertEqual(str(rgb_filter), 'RGB')
class TestClientSearch(unittest.TestCase):
    def test_default_rgb_filter_default_cookies(self):
        """End-to-end search with the default scope cookies and RGB filter."""
        # Both fixtures must already exist in ~/.diamond before running.
        self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'NEWSCOPE')))
        self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'filters', 'fil_rgb')))
        cookies = get_default_scopecookies()
        rgb_filter = get_default_rgb_filter()
        filters = [rgb_filter]
        search = DiamondSearch(cookies, filters)
        search_id = search.start()
        self.assertTrue(search_id)
        _log.info("Search ID %s", search_id)
        # Drain the result stream, showing a lightweight progress counter.
        n_results = 0
        for res in search.results:
            n_results += 1
            if n_results % 10 == 0:
                print "Got %d results\r" % n_results,
        print ""
        # _log.info("The last object: %s", str(res))
        stats = search.get_stats()
        _log.info("Stats: %s", str(stats))
        search.close()
if __name__ == '__main__':
    unittest.main()
|
Add test cases for client lib. Need to support specifying push attributes.
|
Add test cases for client lib. Need to support specifying push attributes.
|
Python
|
epl-1.0
|
cmusatyalab/opendiamond,cmusatyalab/opendiamond,cmusatyalab/opendiamond,cmusatyalab/opendiamond,cmusatyalab/opendiamond
|
import unittest
from opendiamond.client.rpc import ControlConnection, BlastConnection
TEST_HOST = '128.2.209.111'
class ClientRPC(unittest.TestCase):
def test_connection_pair_nonce(self):
control = ControlConnection()
blast = BlastConnection()
nonce = control.connect(TEST_HOST)
blast.connect(TEST_HOST, nonce)
if __name__ == '__main__':
unittest.main()
Add test cases for client lib. Need to support specifying push attributes.
|
import logging
import os
from opendiamond.client.search import DiamondSearch
from opendiamond.client.rpc import ControlConnection, BlastConnection
from opendiamond.client.util import get_default_rgb_filter, get_default_scopecookies
import unittest
_log = logging.getLogger()
_log.setLevel(logging.DEBUG)
_log.addHandler(logging.StreamHandler())
TEST_HOST = 'cloudlet013.elijah.cs.cmu.edu'
class TestClientRPC(unittest.TestCase):
def test_connection_pair_nonce(self):
control = ControlConnection()
blast = BlastConnection()
nonce = control.connect(TEST_HOST)
blast.connect(TEST_HOST, nonce)
class TestClientUtil(unittest.TestCase):
def test_get_default_rgb_filter(self):
rgb_filter = get_default_rgb_filter()
self.assertIsNotNone(rgb_filter)
self.assertEqual(rgb_filter.name, 'RGB')
self.assertEqual(str(rgb_filter), 'RGB')
class TestClientSearch(unittest.TestCase):
def test_default_rgb_filter_default_cookies(self):
self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'NEWSCOPE')))
self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'filters', 'fil_rgb')))
cookies = get_default_scopecookies()
rgb_filter = get_default_rgb_filter()
filters = [rgb_filter]
search = DiamondSearch(cookies, filters)
search_id = search.start()
self.assertTrue(search_id)
_log.info("Search ID %s", search_id)
n_results = 0
for res in search.results:
n_results += 1
if n_results % 10 == 0:
print "Got %d results\r" % n_results,
print ""
# _log.info("The last object: %s", str(res))
stats = search.get_stats()
_log.info("Stats: %s", str(stats))
search.close()
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from opendiamond.client.rpc import ControlConnection, BlastConnection
TEST_HOST = '128.2.209.111'
class ClientRPC(unittest.TestCase):
def test_connection_pair_nonce(self):
control = ControlConnection()
blast = BlastConnection()
nonce = control.connect(TEST_HOST)
blast.connect(TEST_HOST, nonce)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test cases for client lib. Need to support specifying push attributes.<commit_after>
|
import logging
import os
from opendiamond.client.search import DiamondSearch
from opendiamond.client.rpc import ControlConnection, BlastConnection
from opendiamond.client.util import get_default_rgb_filter, get_default_scopecookies
import unittest
_log = logging.getLogger()
_log.setLevel(logging.DEBUG)
_log.addHandler(logging.StreamHandler())
TEST_HOST = 'cloudlet013.elijah.cs.cmu.edu'
class TestClientRPC(unittest.TestCase):
def test_connection_pair_nonce(self):
control = ControlConnection()
blast = BlastConnection()
nonce = control.connect(TEST_HOST)
blast.connect(TEST_HOST, nonce)
class TestClientUtil(unittest.TestCase):
def test_get_default_rgb_filter(self):
rgb_filter = get_default_rgb_filter()
self.assertIsNotNone(rgb_filter)
self.assertEqual(rgb_filter.name, 'RGB')
self.assertEqual(str(rgb_filter), 'RGB')
class TestClientSearch(unittest.TestCase):
def test_default_rgb_filter_default_cookies(self):
self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'NEWSCOPE')))
self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'filters', 'fil_rgb')))
cookies = get_default_scopecookies()
rgb_filter = get_default_rgb_filter()
filters = [rgb_filter]
search = DiamondSearch(cookies, filters)
search_id = search.start()
self.assertTrue(search_id)
_log.info("Search ID %s", search_id)
n_results = 0
for res in search.results:
n_results += 1
if n_results % 10 == 0:
print "Got %d results\r" % n_results,
print ""
# _log.info("The last object: %s", str(res))
stats = search.get_stats()
_log.info("Stats: %s", str(stats))
search.close()
if __name__ == '__main__':
unittest.main()
|
import unittest
from opendiamond.client.rpc import ControlConnection, BlastConnection
TEST_HOST = '128.2.209.111'
class ClientRPC(unittest.TestCase):
def test_connection_pair_nonce(self):
control = ControlConnection()
blast = BlastConnection()
nonce = control.connect(TEST_HOST)
blast.connect(TEST_HOST, nonce)
if __name__ == '__main__':
unittest.main()
Add test cases for client lib. Need to support specifying push attributes.import logging
import os
from opendiamond.client.search import DiamondSearch
from opendiamond.client.rpc import ControlConnection, BlastConnection
from opendiamond.client.util import get_default_rgb_filter, get_default_scopecookies
import unittest
_log = logging.getLogger()
_log.setLevel(logging.DEBUG)
_log.addHandler(logging.StreamHandler())
TEST_HOST = 'cloudlet013.elijah.cs.cmu.edu'
class TestClientRPC(unittest.TestCase):
def test_connection_pair_nonce(self):
control = ControlConnection()
blast = BlastConnection()
nonce = control.connect(TEST_HOST)
blast.connect(TEST_HOST, nonce)
class TestClientUtil(unittest.TestCase):
def test_get_default_rgb_filter(self):
rgb_filter = get_default_rgb_filter()
self.assertIsNotNone(rgb_filter)
self.assertEqual(rgb_filter.name, 'RGB')
self.assertEqual(str(rgb_filter), 'RGB')
class TestClientSearch(unittest.TestCase):
def test_default_rgb_filter_default_cookies(self):
self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'NEWSCOPE')))
self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'filters', 'fil_rgb')))
cookies = get_default_scopecookies()
rgb_filter = get_default_rgb_filter()
filters = [rgb_filter]
search = DiamondSearch(cookies, filters)
search_id = search.start()
self.assertTrue(search_id)
_log.info("Search ID %s", search_id)
n_results = 0
for res in search.results:
n_results += 1
if n_results % 10 == 0:
print "Got %d results\r" % n_results,
print ""
# _log.info("The last object: %s", str(res))
stats = search.get_stats()
_log.info("Stats: %s", str(stats))
search.close()
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from opendiamond.client.rpc import ControlConnection, BlastConnection
TEST_HOST = '128.2.209.111'
class ClientRPC(unittest.TestCase):
def test_connection_pair_nonce(self):
control = ControlConnection()
blast = BlastConnection()
nonce = control.connect(TEST_HOST)
blast.connect(TEST_HOST, nonce)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test cases for client lib. Need to support specifying push attributes.<commit_after>import logging
import os
from opendiamond.client.search import DiamondSearch
from opendiamond.client.rpc import ControlConnection, BlastConnection
from opendiamond.client.util import get_default_rgb_filter, get_default_scopecookies
import unittest
_log = logging.getLogger()
_log.setLevel(logging.DEBUG)
_log.addHandler(logging.StreamHandler())
TEST_HOST = 'cloudlet013.elijah.cs.cmu.edu'
class TestClientRPC(unittest.TestCase):
def test_connection_pair_nonce(self):
control = ControlConnection()
blast = BlastConnection()
nonce = control.connect(TEST_HOST)
blast.connect(TEST_HOST, nonce)
class TestClientUtil(unittest.TestCase):
def test_get_default_rgb_filter(self):
rgb_filter = get_default_rgb_filter()
self.assertIsNotNone(rgb_filter)
self.assertEqual(rgb_filter.name, 'RGB')
self.assertEqual(str(rgb_filter), 'RGB')
class TestClientSearch(unittest.TestCase):
def test_default_rgb_filter_default_cookies(self):
self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'NEWSCOPE')))
self.assertTrue(os.path.isfile(os.path.join(os.environ['HOME'], '.diamond', 'filters', 'fil_rgb')))
cookies = get_default_scopecookies()
rgb_filter = get_default_rgb_filter()
filters = [rgb_filter]
search = DiamondSearch(cookies, filters)
search_id = search.start()
self.assertTrue(search_id)
_log.info("Search ID %s", search_id)
n_results = 0
for res in search.results:
n_results += 1
if n_results % 10 == 0:
print "Got %d results\r" % n_results,
print ""
# _log.info("The last object: %s", str(res))
stats = search.get_stats()
_log.info("Stats: %s", str(stats))
search.close()
if __name__ == '__main__':
unittest.main()
|
5e66b10c3f99e683ffbab1c074583436dd791901
|
tests/test_runner.py
|
tests/test_runner.py
|
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", lambda: raise RuntimeError("Simply failing"))
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
def error_stub():
raise RuntimeError("Simply failing")
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", error_stub)
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
Use normal function instead of lambda for this
|
Use normal function instead of lambda for this
|
Python
|
mit
|
CodersOfTheNight/oshino
|
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", lambda: raise RuntimeError("Simply failing"))
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
Use normal function instead of lambda for this
|
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
def error_stub():
raise RuntimeError("Simply failing")
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", error_stub)
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
<commit_before>import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", lambda: raise RuntimeError("Simply failing"))
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
<commit_msg>Use normal function instead of lambda for this<commit_after>
|
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
def error_stub():
raise RuntimeError("Simply failing")
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", error_stub)
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", lambda: raise RuntimeError("Simply failing"))
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
Use normal function instead of lambda for thisimport asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
def error_stub():
raise RuntimeError("Simply failing")
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", error_stub)
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
<commit_before>import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", lambda: raise RuntimeError("Simply failing"))
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
<commit_msg>Use normal function instead of lambda for this<commit_after>import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
def error_stub():
raise RuntimeError("Simply failing")
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", error_stub)
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
192c92fba3836f2073576674495faa42799cdb95
|
tests/test_sqlite.py
|
tests/test_sqlite.py
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
def test_package(Package):
p = Package('pdns-backend-sqlite3')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
debian_os = ['debian', 'ubuntu']
rhel_os = ['redhat', 'centos']
def test_package(Package, SystemInfo):
p = None
if SystemInfo.distribution in debian_os:
p = Package('pdns-backend-sqlite3')
if SystemInfo.distribution in rhel_os:
p = Package('pdns-backend-sqlite')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
|
Fix sqlite test on CentOS
|
Fix sqlite test on CentOS
|
Python
|
mit
|
PowerDNS/pdns-ansible
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
def test_package(Package):
p = Package('pdns-backend-sqlite3')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
Fix sqlite test on CentOS
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
debian_os = ['debian', 'ubuntu']
rhel_os = ['redhat', 'centos']
def test_package(Package, SystemInfo):
p = None
if SystemInfo.distribution in debian_os:
p = Package('pdns-backend-sqlite3')
if SystemInfo.distribution in rhel_os:
p = Package('pdns-backend-sqlite')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
|
<commit_before>import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
def test_package(Package):
p = Package('pdns-backend-sqlite3')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
<commit_msg>Fix sqlite test on CentOS<commit_after>
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
debian_os = ['debian', 'ubuntu']
rhel_os = ['redhat', 'centos']
def test_package(Package, SystemInfo):
p = None
if SystemInfo.distribution in debian_os:
p = Package('pdns-backend-sqlite3')
if SystemInfo.distribution in rhel_os:
p = Package('pdns-backend-sqlite')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
|
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
def test_package(Package):
p = Package('pdns-backend-sqlite3')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
Fix sqlite test on CentOSimport testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
debian_os = ['debian', 'ubuntu']
rhel_os = ['redhat', 'centos']
def test_package(Package, SystemInfo):
p = None
if SystemInfo.distribution in debian_os:
p = Package('pdns-backend-sqlite3')
if SystemInfo.distribution in rhel_os:
p = Package('pdns-backend-sqlite')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
|
<commit_before>import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
def test_package(Package):
p = Package('pdns-backend-sqlite3')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
<commit_msg>Fix sqlite test on CentOS<commit_after>import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
'.molecule/ansible_inventory').get_hosts('sqlite')
debian_os = ['debian', 'ubuntu']
rhel_os = ['redhat', 'centos']
def test_package(Package, SystemInfo):
p = None
if SystemInfo.distribution in debian_os:
p = Package('pdns-backend-sqlite3')
if SystemInfo.distribution in rhel_os:
p = Package('pdns-backend-sqlite')
assert p.is_installed
def test_database_exists(File):
f = File('/var/lib/powerdns/pdns.db')
assert f.exists
assert f.user == 'pdns'
assert f.group == 'pdns'
assert f.mode == 420
assert f.size > 10000
|
b6978852775bb48e400a31a1e464d7b596db13f2
|
fsictools.py
|
fsictools.py
|
# -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
|
# -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'pandas.DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'pandas.DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
|
Update type annotations to reference package names in full
|
TYP: Update type annotations to reference package names in full
|
Python
|
mit
|
ChrisThoung/fsic
|
# -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
TYP: Update type annotations to reference package names in full
|
# -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'pandas.DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'pandas.DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
|
<commit_before># -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
<commit_msg>TYP: Update type annotations to reference package names in full<commit_after>
|
# -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'pandas.DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'pandas.DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
|
# -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
TYP: Update type annotations to reference package names in full# -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'pandas.DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'pandas.DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
|
<commit_before># -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
<commit_msg>TYP: Update type annotations to reference package names in full<commit_after># -*- coding: utf-8 -*-
"""
fsictools
=========
Supporting tools for FSIC-based economic models. See the individual docstrings
for dependencies additional to those of `fsic`.
"""
# Version number keeps track with the main `fsic` module
from fsic import __version__
import re
from typing import List
from fsic import BaseModel, Symbol
def symbols_to_dataframe(symbols: List[Symbol]) -> 'pandas.DataFrame':
"""Convert the list of symbols to a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
return DataFrame([s._asdict() for s in symbols])
def model_to_dataframe(model: BaseModel) -> 'pandas.DataFrame':
"""Return the values and solution information from the model as a `pandas` DataFrame. **Requires `pandas`**."""
from pandas import DataFrame
df = DataFrame(model.values.T, index=model.span, columns=model.names)
df['status'] = model.status
df['iterations'] = model.iterations
return df
|
4e78179d81f5e3da6d9981f60133089347a81caf
|
txsni/snimap.py
|
txsni/snimap.py
|
from OpenSSL.SSL import Context, TLSv1_METHOD
from twisted.internet.ssl import ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = Context(TLSv1_METHOD)
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
|
from twisted.internet.ssl import CertificateOptions, ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = CertificateOptions().getContext()
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
|
Create a properly configured SSL Context
|
Create a properly configured SSL Context
|
Python
|
mit
|
glyph/txsni
|
from OpenSSL.SSL import Context, TLSv1_METHOD
from twisted.internet.ssl import ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = Context(TLSv1_METHOD)
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
Create a properly configured SSL Context
|
from twisted.internet.ssl import CertificateOptions, ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = CertificateOptions().getContext()
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
|
<commit_before>
from OpenSSL.SSL import Context, TLSv1_METHOD
from twisted.internet.ssl import ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = Context(TLSv1_METHOD)
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
<commit_msg>Create a properly configured SSL Context<commit_after>
|
from twisted.internet.ssl import CertificateOptions, ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = CertificateOptions().getContext()
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
|
from OpenSSL.SSL import Context, TLSv1_METHOD
from twisted.internet.ssl import ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = Context(TLSv1_METHOD)
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
Create a properly configured SSL Contextfrom twisted.internet.ssl import CertificateOptions, ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = CertificateOptions().getContext()
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
|
<commit_before>
from OpenSSL.SSL import Context, TLSv1_METHOD
from twisted.internet.ssl import ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = Context(TLSv1_METHOD)
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
<commit_msg>Create a properly configured SSL Context<commit_after>from twisted.internet.ssl import CertificateOptions, ContextFactory
from txsni.only_noticed_pypi_pem_after_i_wrote_this import (
certificateOptionsFromPileOfPEM
)
class SNIMap(ContextFactory, object):
def __init__(self, mapping):
self.mapping = mapping
try:
self.context = self.mapping['DEFAULT'].getContext()
except KeyError:
self.context = CertificateOptions().getContext()
self.context.set_tlsext_servername_callback(
self.selectContext
)
def getContext(self):
return self.context
def selectContext(self, connection):
connection.set_context(
self.mapping[connection.get_servername()]
.getContext()
)
class HostDirectoryMap(object):
def __init__(self, directoryPath):
self.directoryPath = directoryPath
def __getitem__(self, hostname):
filePath = self.directoryPath.child(hostname + ".pem")
if filePath.isfile():
return certificateOptionsFromPileOfPEM(filePath.getContent())
else:
raise KeyError("no pem file for " + hostname)
|
3661eff00612f0d6d4e6e8477a09729635ac9851
|
ditto/core/views/__init__.py
|
ditto/core/views/__init__.py
|
import os
from django.views.generic import TemplateView
class PageView(TemplateView):
def get_template_names(self):
return [os.path.join('pages', self.args[0] + '.html')]
def get_context_data(self, **kwargs):
context = super(PageView, self).get_context_data(**kwargs)
context['nav'] = [self.args[0]]
return context
|
Add utility view for static pages
|
Add utility view for static pages
|
Python
|
bsd-3-clause
|
Kvoti/ditto,Kvoti/ditto,Kvoti/ditto,Kvoti/ditto,Kvoti/ditto
|
Add utility view for static pages
|
import os
from django.views.generic import TemplateView
class PageView(TemplateView):
def get_template_names(self):
return [os.path.join('pages', self.args[0] + '.html')]
def get_context_data(self, **kwargs):
context = super(PageView, self).get_context_data(**kwargs)
context['nav'] = [self.args[0]]
return context
|
<commit_before><commit_msg>Add utility view for static pages<commit_after>
|
import os
from django.views.generic import TemplateView
class PageView(TemplateView):
def get_template_names(self):
return [os.path.join('pages', self.args[0] + '.html')]
def get_context_data(self, **kwargs):
context = super(PageView, self).get_context_data(**kwargs)
context['nav'] = [self.args[0]]
return context
|
Add utility view for static pagesimport os
from django.views.generic import TemplateView
class PageView(TemplateView):
def get_template_names(self):
return [os.path.join('pages', self.args[0] + '.html')]
def get_context_data(self, **kwargs):
context = super(PageView, self).get_context_data(**kwargs)
context['nav'] = [self.args[0]]
return context
|
<commit_before><commit_msg>Add utility view for static pages<commit_after>import os
from django.views.generic import TemplateView
class PageView(TemplateView):
def get_template_names(self):
return [os.path.join('pages', self.args[0] + '.html')]
def get_context_data(self, **kwargs):
context = super(PageView, self).get_context_data(**kwargs)
context['nav'] = [self.args[0]]
return context
|
|
2a0326f4fa379ee74e548b0d3701092caf28e0b4
|
examples/tsa/ex_arma2.py
|
examples/tsa/ex_arma2.py
|
"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
|
"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
np.random.seed(12345)
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
|
Add seed to example and use stationary coefs
|
EX: Add seed to example and use stationary coefs
|
Python
|
bsd-3-clause
|
pprett/statsmodels,wdurhamh/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,cbmoore/statsmodels,wzbozon/statsmodels,musically-ut/statsmodels,wzbozon/statsmodels,pprett/statsmodels,josef-pkt/statsmodels,ChadFulton/statsmodels,jstoxrocky/statsmodels,jstoxrocky/statsmodels,musically-ut/statsmodels,edhuckle/statsmodels,detrout/debian-statsmodels,wdurhamh/statsmodels,alekz112/statsmodels,nvoron23/statsmodels,cbmoore/statsmodels,adammenges/statsmodels,nvoron23/statsmodels,jseabold/statsmodels,saketkc/statsmodels,huongttlan/statsmodels,bavardage/statsmodels,wzbozon/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,hainm/statsmodels,wwf5067/statsmodels,hainm/statsmodels,YihaoLu/statsmodels,josef-pkt/statsmodels,DonBeo/statsmodels,bzero/statsmodels,yl565/statsmodels,waynenilsen/statsmodels,saketkc/statsmodels,wwf5067/statsmodels,YihaoLu/statsmodels,bavardage/statsmodels,rgommers/statsmodels,phobson/statsmodels,phobson/statsmodels,phobson/statsmodels,cbmoore/statsmodels,huongttlan/statsmodels,jstoxrocky/statsmodels,huongttlan/statsmodels,waynenilsen/statsmodels,hainm/statsmodels,bert9bert/statsmodels,bashtage/statsmodels,wkfwkf/statsmodels,rgommers/statsmodels,bert9bert/statsmodels,wdurhamh/statsmodels,Averroes/statsmodels,gef756/statsmodels,ChadFulton/statsmodels,wzbozon/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,bzero/statsmodels,astocko/statsmodels,gef756/statsmodels,hainm/statsmodels,Averroes/statsmodels,yarikoptic/pystatsmodels,nvoron23/statsmodels,DonBeo/statsmodels,yl565/statsmodels,bashtage/statsmodels,bavardage/statsmodels,hlin117/statsmodels,saketkc/statsmodels,kiyoto/statsmodels,wwf5067/statsmodels,nguyentu1602/statsmodels,wkfwkf/statsmodels,statsmodels/statsmodels,yl565/statsmodels,jseabold/statsmodels,saketkc/statsmodels,yarikoptic/pystatsmodels,wkfwkf/statsmodels,statsmodels/statsmodels,edhuckle/statsmodels,Averroes/statsmodels,jseabold/statsmodels,rgommers/statsmodels,statsmodels/statsmodels,nguyentu1602/statsmodels,musically-ut/stat
smodels,detrout/debian-statsmodels,gef756/statsmodels,nguyentu1602/statsmodels,wzbozon/statsmodels,saketkc/statsmodels,kiyoto/statsmodels,bashtage/statsmodels,ChadFulton/statsmodels,bsipocz/statsmodels,bzero/statsmodels,DonBeo/statsmodels,edhuckle/statsmodels,kiyoto/statsmodels,wdurhamh/statsmodels,waynenilsen/statsmodels,kiyoto/statsmodels,adammenges/statsmodels,astocko/statsmodels,bert9bert/statsmodels,DonBeo/statsmodels,detrout/debian-statsmodels,Averroes/statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,astocko/statsmodels,huongttlan/statsmodels,pprett/statsmodels,edhuckle/statsmodels,bert9bert/statsmodels,josef-pkt/statsmodels,phobson/statsmodels,DonBeo/statsmodels,yl565/statsmodels,hlin117/statsmodels,bavardage/statsmodels,ChadFulton/statsmodels,hlin117/statsmodels,alekz112/statsmodels,yarikoptic/pystatsmodels,cbmoore/statsmodels,waynenilsen/statsmodels,bzero/statsmodels,bashtage/statsmodels,nvoron23/statsmodels,cbmoore/statsmodels,rgommers/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,bavardage/statsmodels,kiyoto/statsmodels,ChadFulton/statsmodels,bert9bert/statsmodels,wkfwkf/statsmodels,nvoron23/statsmodels,alekz112/statsmodels,phobson/statsmodels,wkfwkf/statsmodels,YihaoLu/statsmodels,astocko/statsmodels,edhuckle/statsmodels,musically-ut/statsmodels,jseabold/statsmodels,josef-pkt/statsmodels,alekz112/statsmodels,nguyentu1602/statsmodels,bsipocz/statsmodels,hlin117/statsmodels,YihaoLu/statsmodels,adammenges/statsmodels,ChadFulton/statsmodels,gef756/statsmodels,rgommers/statsmodels,bsipocz/statsmodels,bashtage/statsmodels,bsipocz/statsmodels,gef756/statsmodels,jseabold/statsmodels,bzero/statsmodels,yl565/statsmodels,pprett/statsmodels,adammenges/statsmodels,detrout/debian-statsmodels
|
"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
EX: Add seed to example and use stationary coefs
|
"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
np.random.seed(12345)
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
|
<commit_before>"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
<commit_msg>EX: Add seed to example and use stationary coefs<commit_after>
|
"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
np.random.seed(12345)
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
|
"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
EX: Add seed to example and use stationary coefs"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
np.random.seed(12345)
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
|
<commit_before>"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
<commit_msg>EX: Add seed to example and use stationary coefs<commit_after>"""
Autoregressive Moving Average (ARMA) Model
"""
import numpy as np
import statsmodels.api as sm
# Generate some data from an ARMA process
from statsmodels.tsa.arima_process import arma_generate_sample
np.random.seed(12345)
arparams = np.array([.75, -.25])
maparams = np.array([.65, .35])
# The conventions of the arma_generate function require that we specify a
# 1 for the zero-lag of the AR and MA parameters and that the AR parameters
# be negated.
arparams = np.r_[1, -arparams]
maparam = np.r_[1, maparams]
nobs = 250
y = arma_generate_sample(arparams, maparams, nobs)
# Now, optionally, we can add some dates information. For this example,
# we'll use a pandas time series.
import pandas
dates = sm.tsa.datetools.dates_from_range('1980m1', length=nobs)
y = pandas.TimeSeries(y, index=dates)
arma_mod = sm.tsa.ARMA(y, freq='M')
arma_res = arma_mod.fit(order=(2,2), trend='nc', disp=-1)
|
fd5e21705d8f7757cf345c8c98af260203c44517
|
malcolm/modules/__init__.py
|
malcolm/modules/__init__.py
|
class Importer(object):
def __init__(self):
self.update_dict = {}
self.ignore = ["docs"]
def import_subpackages(self, path):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if f not in self.ignore and os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.info("Importing %s failed", name, exc_info=True)
# Try the import of subpackages too
self.import_subpackages(path)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
class Importer(object):
def __init__(self):
self.update_dict = {}
self.dirnames = [
"vmetas", "infos", "controllers", "parts", "includes", "blocks"]
def import_subpackages(self, path, filter=()):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if not filter or f in filter:
if os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
# Try the import of subpackages too
self.import_subpackages(path + [f], self.dirnames)
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.warning("Importing %s failed", name, exc_info=True)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
Improve import logic for clearer error messages
|
Improve import logic for clearer error messages
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
class Importer(object):
def __init__(self):
self.update_dict = {}
self.ignore = ["docs"]
def import_subpackages(self, path):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if f not in self.ignore and os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.info("Importing %s failed", name, exc_info=True)
# Try the import of subpackages too
self.import_subpackages(path)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
Improve import logic for clearer error messages
|
class Importer(object):
def __init__(self):
self.update_dict = {}
self.dirnames = [
"vmetas", "infos", "controllers", "parts", "includes", "blocks"]
def import_subpackages(self, path, filter=()):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if not filter or f in filter:
if os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
# Try the import of subpackages too
self.import_subpackages(path + [f], self.dirnames)
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.warning("Importing %s failed", name, exc_info=True)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
<commit_before>class Importer(object):
def __init__(self):
self.update_dict = {}
self.ignore = ["docs"]
def import_subpackages(self, path):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if f not in self.ignore and os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.info("Importing %s failed", name, exc_info=True)
# Try the import of subpackages too
self.import_subpackages(path)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
<commit_msg>Improve import logic for clearer error messages<commit_after>
|
class Importer(object):
def __init__(self):
self.update_dict = {}
self.dirnames = [
"vmetas", "infos", "controllers", "parts", "includes", "blocks"]
def import_subpackages(self, path, filter=()):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if not filter or f in filter:
if os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
# Try the import of subpackages too
self.import_subpackages(path + [f], self.dirnames)
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.warning("Importing %s failed", name, exc_info=True)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
class Importer(object):
def __init__(self):
self.update_dict = {}
self.ignore = ["docs"]
def import_subpackages(self, path):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if f not in self.ignore and os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.info("Importing %s failed", name, exc_info=True)
# Try the import of subpackages too
self.import_subpackages(path)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
Improve import logic for clearer error messagesclass Importer(object):
def __init__(self):
self.update_dict = {}
self.dirnames = [
"vmetas", "infos", "controllers", "parts", "includes", "blocks"]
def import_subpackages(self, path, filter=()):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if not filter or f in filter:
if os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
# Try the import of subpackages too
self.import_subpackages(path + [f], self.dirnames)
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.warning("Importing %s failed", name, exc_info=True)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
<commit_before>class Importer(object):
def __init__(self):
self.update_dict = {}
self.ignore = ["docs"]
def import_subpackages(self, path):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if f not in self.ignore and os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.info("Importing %s failed", name, exc_info=True)
# Try the import of subpackages too
self.import_subpackages(path)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
<commit_msg>Improve import logic for clearer error messages<commit_after>class Importer(object):
def __init__(self):
self.update_dict = {}
self.dirnames = [
"vmetas", "infos", "controllers", "parts", "includes", "blocks"]
def import_subpackages(self, path, filter=()):
import os
dirname = os.path.join(os.path.dirname(__file__), *path)
for f in os.listdir(dirname):
if not filter or f in filter:
if os.path.isdir(os.path.join(dirname, f)):
self.try_import_path(path + [f])
# Try the import of subpackages too
self.import_subpackages(path + [f], self.dirnames)
def try_import_path(self, path):
import importlib
name = ".".join(path)
try:
self.update_dict[name] = importlib.import_module(
"malcolm.modules.%s" % name)
except ImportError:
import logging
# Create a module level logger
log = logging.getLogger(__name__)
log.warning("Importing %s failed", name, exc_info=True)
def prepare(self, globals_d):
self.import_subpackages([])
globals_d.update(self.update_dict)
__all__ = list(self.update_dict)
return __all__
__all__ = Importer().prepare(globals())
del Importer
|
74ea104c81908976dfe0c708d2dfd8d7eb10f0cc
|
mayatools/transforms.py
|
mayatools/transforms.py
|
import re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
|
import re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
# Fix angle flips with a Euler filter.
for dst in dst_to_src.iterkeys():
curves = cmds.listConnections(dst, type='animCurveTA')
curves = [x for x in curves if 'rotate' in x]
if curves:
cmds.filterCurve(*curves, filter='euler')
|
Fix angle flips with a Euler filter
|
Locators: Fix angle flips with a Euler filter
|
Python
|
bsd-3-clause
|
westernx/mayatools,westernx/mayatools
|
import re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
Locators: Fix angle flips with a Euler filter
|
import re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
# Fix angle flips with a Euler filter.
for dst in dst_to_src.iterkeys():
curves = cmds.listConnections(dst, type='animCurveTA')
curves = [x for x in curves if 'rotate' in x]
if curves:
cmds.filterCurve(*curves, filter='euler')
|
<commit_before>import re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
<commit_msg>Locators: Fix angle flips with a Euler filter<commit_after>
|
import re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
# Fix angle flips with a Euler filter.
for dst in dst_to_src.iterkeys():
curves = cmds.listConnections(dst, type='animCurveTA')
curves = [x for x in curves if 'rotate' in x]
if curves:
cmds.filterCurve(*curves, filter='euler')
|
import re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
Locators: Fix angle flips with a Euler filterimport re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
# Fix angle flips with a Euler filter.
for dst in dst_to_src.iterkeys():
curves = cmds.listConnections(dst, type='animCurveTA')
curves = [x for x in curves if 'rotate' in x]
if curves:
cmds.filterCurve(*curves, filter='euler')
|
<commit_before>import re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
<commit_msg>Locators: Fix angle flips with a Euler filter<commit_after>import re
from maya import cmds
from . import context
def transfer_global_transforms(dst_to_src, time_range=None):
"""Bake global transform from one node onto another.
:param dict dst_to_src: Mapping nodes to transfer transformations onto, to
the nodes to source those transformations from.
:param tuple time_range: ``(min_time, max_time)`` or None for the current
playback timeframe.
"""
dst_to_src = dict(dst_to_src)
if not dst_to_src:
return
# Contrain every dst to their src.
constraints = []
for dst, src in dst_to_src.iteritems():
constraints.extend((
cmds.parentConstraint(src, dst),
cmds.scaleConstraint(src, dst),
))
if time_range is None:
time_range = (cmds.playbackOptions(q=True, minTime=True), cmds.playbackOptions(q=True, maxTime=True))
with context.suspend_refresh():
cmds.bakeResults(*dst_to_src.iterkeys(), **dict(
simulation=True,
time=time_range,
))
cmds.delete(*constraints)
# Fix angle flips with a Euler filter.
for dst in dst_to_src.iterkeys():
curves = cmds.listConnections(dst, type='animCurveTA')
curves = [x for x in curves if 'rotate' in x]
if curves:
cmds.filterCurve(*curves, filter='euler')
|
a73a4b3373ad032ac2ad02426fef8a23314d5826
|
test/test_external_libs.py
|
test/test_external_libs.py
|
import unittest
from test.util import ykman_cli
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
|
import unittest
import os
from test.util import ykman_cli
@unittest.skipIf(
os.environ.get('INTEGRATION_TESTS') != 'TRUE', 'INTEGRATION_TESTS != TRUE')
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
|
Revert "Don't check INTEGRATION_TESTS env var in external libs test"
|
Revert "Don't check INTEGRATION_TESTS env var in external libs test"
This reverts commit 648d02fbfca79241a65902f6dd9a7a767a0f633d.
|
Python
|
bsd-2-clause
|
Yubico/yubikey-manager,Yubico/yubikey-manager
|
import unittest
from test.util import ykman_cli
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
Revert "Don't check INTEGRATION_TESTS env var in external libs test"
This reverts commit 648d02fbfca79241a65902f6dd9a7a767a0f633d.
|
import unittest
import os
from test.util import ykman_cli
@unittest.skipIf(
os.environ.get('INTEGRATION_TESTS') != 'TRUE', 'INTEGRATION_TESTS != TRUE')
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
|
<commit_before>import unittest
from test.util import ykman_cli
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
<commit_msg>Revert "Don't check INTEGRATION_TESTS env var in external libs test"
This reverts commit 648d02fbfca79241a65902f6dd9a7a767a0f633d.<commit_after>
|
import unittest
import os
from test.util import ykman_cli
@unittest.skipIf(
os.environ.get('INTEGRATION_TESTS') != 'TRUE', 'INTEGRATION_TESTS != TRUE')
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
|
import unittest
from test.util import ykman_cli
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
Revert "Don't check INTEGRATION_TESTS env var in external libs test"
This reverts commit 648d02fbfca79241a65902f6dd9a7a767a0f633d.import unittest
import os
from test.util import ykman_cli
@unittest.skipIf(
os.environ.get('INTEGRATION_TESTS') != 'TRUE', 'INTEGRATION_TESTS != TRUE')
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
|
<commit_before>import unittest
from test.util import ykman_cli
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
<commit_msg>Revert "Don't check INTEGRATION_TESTS env var in external libs test"
This reverts commit 648d02fbfca79241a65902f6dd9a7a767a0f633d.<commit_after>import unittest
import os
from test.util import ykman_cli
@unittest.skipIf(
os.environ.get('INTEGRATION_TESTS') != 'TRUE', 'INTEGRATION_TESTS != TRUE')
class TestExternalLibraries(unittest.TestCase):
def test_ykman_version(self):
output = ykman_cli('-v')
# Test that major version is 1 on all libs
self.assertIn('libykpers 1', output)
self.assertIn('libu2f-host 1', output)
self.assertIn('libusb 1', output)
def test_ykman_version_not_found(self):
output = ykman_cli('-v')
self.assertNotIn('not found!', output)
self.assertNotIn('<pyusb backend missing>', output)
|
4ed53518d3a2e630fe7f7f3f266f81bb12ada7ae
|
pinax/apps/blog/forms.py
|
pinax/apps/blog/forms.py
|
from datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 20,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
|
from datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 40,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
|
Increase blog slug max length
|
Increase blog slug max length
|
Python
|
mit
|
amarandon/pinax,amarandon/pinax,amarandon/pinax,amarandon/pinax
|
from datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 20,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
Increase blog slug max length
|
from datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 40,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
|
<commit_before>from datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 20,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
<commit_msg>Increase blog slug max length<commit_after>
|
from datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 40,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
|
from datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 20,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
Increase blog slug max lengthfrom datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 40,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
|
<commit_before>from datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 20,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
<commit_msg>Increase blog slug max length<commit_after>from datetime import datetime
from django import forms
from django.utils.translation import ugettext_lazy as _
from pinax.apps.blog.models import Post
class BlogForm(forms.ModelForm):
slug = forms.SlugField(
max_length = 40,
help_text = _("a short version of the title consisting only of letters, numbers, underscores and hyphens."),
)
class Meta:
model = Post
exclude = [
"author",
"creator_ip",
"created_at",
"updated_at",
"publish",
]
def __init__(self, user=None, *args, **kwargs):
self.user = user
super(BlogForm, self).__init__(*args, **kwargs)
def clean_slug(self):
if not self.instance.pk:
if Post.objects.filter(author=self.user, slug=self.cleaned_data["slug"]).exists():
raise forms.ValidationError(u"This field must be unique")
return self.cleaned_data["slug"]
try:
post = Post.objects.get(
author = self.user,
created_at__month = self.instance.created_at.month,
created_at__year = self.instance.created_at.year,
slug = self.cleaned_data["slug"]
)
if post != self.instance:
raise forms.ValidationError(u"This field must be unique")
except Post.DoesNotExist:
pass
return self.cleaned_data["slug"]
|
9936c9e33276fde64f62ac28d03c0870676233d3
|
setup.py
|
setup.py
|
import sys
from setuptools import setup
if sys.version_info[:2] < (3, 1):
sys.exit('raven-bash requires Python 3.1 or higher.')
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
|
import sys
from setuptools import setup
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
|
Remove requirement for Python 3+
|
Remove requirement for Python 3+
|
Python
|
apache-2.0
|
hareevs/raven-bash,ViktorStiskala/raven-bash,ViktorStiskala/raven-bash,hareevs/raven-bash
|
import sys
from setuptools import setup
if sys.version_info[:2] < (3, 1):
sys.exit('raven-bash requires Python 3.1 or higher.')
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
Remove requirement for Python 3+
|
import sys
from setuptools import setup
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
|
<commit_before>import sys
from setuptools import setup
if sys.version_info[:2] < (3, 1):
sys.exit('raven-bash requires Python 3.1 or higher.')
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
<commit_msg>Remove requirement for Python 3+<commit_after>
|
import sys
from setuptools import setup
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
|
import sys
from setuptools import setup
if sys.version_info[:2] < (3, 1):
sys.exit('raven-bash requires Python 3.1 or higher.')
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
Remove requirement for Python 3+import sys
from setuptools import setup
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
|
<commit_before>import sys
from setuptools import setup
if sys.version_info[:2] < (3, 1):
sys.exit('raven-bash requires Python 3.1 or higher.')
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
<commit_msg>Remove requirement for Python 3+<commit_after>import sys
from setuptools import setup
long_description = '''\
Raven Sentry client for Bash.
Logs error if one of your commands exits with non-zero return code and produces simple traceback for
easier debugging. It also tries to extract last values of the variables visible in the traceback.
Environment variables and stderr output are also included.
For more information please visit project repo on GitHub: https://github.com/hareevs/raven-bash
'''
setup(
name='raven-bash',
version='0.1.1',
description='Raven Sentry client for Bash.',
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
],
keywords='raven sentry bash',
author='Viktor Stískala',
author_email='viktor@stiskala.cz',
url='https://github.com/hareevs/raven-bash',
license='Apache License 2.0',
install_requires=['raven>=5.1.1', 'configparser'],
packages=['logger'],
package_data={'logger': ['raven-bash', 'logger/*.py']},
entry_points={
'console_scripts': [
'raven-logger=logger.raven_logger:main',
],
},
scripts=['raven-bash'],
zip_safe=False
)
|
e6d90a61fdcf78f1ab5b3f13398a66f5c1eb20b1
|
setup.py
|
setup.py
|
from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
|
from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8.1',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
|
Bump version to fix build
|
Bump version to fix build
|
Python
|
mit
|
gabfl/vault
|
from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
Bump version to fix build
|
from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8.1',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
|
<commit_before>from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
<commit_msg>Bump version to fix build<commit_after>
|
from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8.1',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
|
from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
Bump version to fix buildfrom setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8.1',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
|
<commit_before>from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
<commit_msg>Bump version to fix build<commit_after>from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pyvault',
version='1.8.1',
description='Python password manager',
long_description=long_description,
author='Gabriel Bordeaux',
author_email='pypi@gab.lc',
url='https://github.com/gabfl/vault',
license='MIT',
packages=['vault', 'vault.lib'],
package_dir={'vault': 'src'},
install_requires=['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points={
'console_scripts': [
'vault = vault.vault:main',
],
},
classifiers=[ # see https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Topic :: Security',
'Topic :: Security :: Cryptography',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Natural Language :: English',
#'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python',
'Development Status :: 4 - Beta',
#'Development Status :: 5 - Production/Stable',
],
)
|
14a7ce543192dc5104af370a0b3d08301241ad8b
|
ord_hackday/search/views.py
|
ord_hackday/search/views.py
|
from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
c['results'].append(r)
return render(request, 'search.html', c)
|
from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
r['result_url'] = portal.url + '/dataset/' + r['name']
c['results'].append(r)
return render(request, 'search.html', c)
|
Return constructed URL for each result.
|
Return constructed URL for each result.
|
Python
|
mit
|
bellisk/opendata-multisearch,bellisk/opendata-multisearch
|
from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
c['results'].append(r)
return render(request, 'search.html', c)Return constructed URL for each result.
|
from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
r['result_url'] = portal.url + '/dataset/' + r['name']
c['results'].append(r)
return render(request, 'search.html', c)
|
<commit_before>from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
c['results'].append(r)
return render(request, 'search.html', c)<commit_msg>Return constructed URL for each result.<commit_after>
|
from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
r['result_url'] = portal.url + '/dataset/' + r['name']
c['results'].append(r)
return render(request, 'search.html', c)
|
from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
c['results'].append(r)
return render(request, 'search.html', c)Return constructed URL for each result.from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
r['result_url'] = portal.url + '/dataset/' + r['name']
c['results'].append(r)
return render(request, 'search.html', c)
|
<commit_before>from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
c['results'].append(r)
return render(request, 'search.html', c)<commit_msg>Return constructed URL for each result.<commit_after>from django.shortcuts import render
from ord_hackday.search.models import Portal
import requests
import json
def search(request):
c = {}
if 'query' in request.GET:
query = request.GET['query']
if len(query) > 0:
portals = Portal.objects.all()
c['portals'] = portals
c['results'] = []
for portal in portals:
url = portal.url + '/api/3/action/package_search?q=' + query
r = requests.get(url)
json_result = json.loads(r.text)
if json_result['success']:
for r in json_result['result']['results']:
r['result_url'] = portal.url + '/dataset/' + r['name']
c['results'].append(r)
return render(request, 'search.html', c)
|
a2762922b98c3733f103a631cd3ef346ab5bb54f
|
examples/pax_mininet_node.py
|
examples/pax_mininet_node.py
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
Remove print statement from PaxNode config method
|
Remove print statement from PaxNode config method
|
Python
|
apache-2.0
|
TMVector/pax,TMVector/pax,niksu/pax,niksu/pax,niksu/pax
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
Remove print statement from PaxNode config method
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
<commit_before># coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
<commit_msg>Remove print statement from PaxNode config method<commit_after>
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
Remove print statement from PaxNode config method# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
<commit_before># coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
<commit_msg>Remove print statement from PaxNode config method<commit_after># coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
92d23c145c450f8ff0345aa6a5ea000c856e764d
|
indra/sources/tas/__init__.py
|
indra/sources/tas/__init__.py
|
"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset."""
from .api import *
|
"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset.
Moret, N., et al. (2018). Cheminformatics tools for analyzing and designing
optimized small molecule libraries. BioRxiv, (617), 358978.
https://doi.org/10.1101/358978
"""
from .api import *
|
Add citation to TAS docs
|
Add citation to TAS docs
|
Python
|
bsd-2-clause
|
johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,sorgerlab/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,bgyori/indra,pvtodorov/indra,pvtodorov/indra,bgyori/indra,bgyori/indra,sorgerlab/indra,sorgerlab/indra,johnbachman/indra,johnbachman/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy
|
"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset."""
from .api import *
Add citation to TAS docs
|
"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset.
Moret, N., et al. (2018). Cheminformatics tools for analyzing and designing
optimized small molecule libraries. BioRxiv, (617), 358978.
https://doi.org/10.1101/358978
"""
from .api import *
|
<commit_before>"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset."""
from .api import *
<commit_msg>Add citation to TAS docs<commit_after>
|
"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset.
Moret, N., et al. (2018). Cheminformatics tools for analyzing and designing
optimized small molecule libraries. BioRxiv, (617), 358978.
https://doi.org/10.1101/358978
"""
from .api import *
|
"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset."""
from .api import *
Add citation to TAS docs"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset.
Moret, N., et al. (2018). Cheminformatics tools for analyzing and designing
optimized small molecule libraries. BioRxiv, (617), 358978.
https://doi.org/10.1101/358978
"""
from .api import *
|
<commit_before>"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset."""
from .api import *
<commit_msg>Add citation to TAS docs<commit_after>"""This module provides and API and processor to the Target Affinity Spectrum
data set compiled by N. Moret in the Laboratory of Systems Pharmacology at HMS.
This data set is based on experiments as opposed to the manually curated
drug-target relationships provided in the LINCS small molecule dataset.
Moret, N., et al. (2018). Cheminformatics tools for analyzing and designing
optimized small molecule libraries. BioRxiv, (617), 358978.
https://doi.org/10.1101/358978
"""
from .api import *
|
f77b17cca1686ea082a5a71d18dfe4ca01699b3e
|
raxcli/utils.py
|
raxcli/utils.py
|
# Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
result[name] = value
return result
|
# Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, reverse=False, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
if reverse:
result[value] = name
else:
result[name] = value
return result
|
Add 'reverse' argument to get_enum_as_dict.
|
Add 'reverse' argument to get_enum_as_dict.
|
Python
|
apache-2.0
|
racker/python-raxcli
|
# Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
result[name] = value
return result
Add 'reverse' argument to get_enum_as_dict.
|
# Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, reverse=False, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
if reverse:
result[value] = name
else:
result[name] = value
return result
|
<commit_before># Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
result[name] = value
return result
<commit_msg>Add 'reverse' argument to get_enum_as_dict.<commit_after>
|
# Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, reverse=False, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
if reverse:
result[value] = name
else:
result[name] = value
return result
|
# Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
result[name] = value
return result
Add 'reverse' argument to get_enum_as_dict.# Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, reverse=False, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
if reverse:
result[value] = name
else:
result[name] = value
return result
|
<commit_before># Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
result[name] = value
return result
<commit_msg>Add 'reverse' argument to get_enum_as_dict.<commit_after># Copyright 2013 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'get_enum_as_dict'
]
def get_enum_as_dict(cls, reverse=False, friendly_names=False):
"""
Convert an "enum" class to a dict key is the enum name and value is an enum
value.
"""
result = {}
for key, value in cls.__dict__.items():
if key.startswith('__'):
continue
if key[0] != key[0].upper():
continue
name = key
if friendly_names:
name = name.replace('_', ' ').lower().title()
if reverse:
result[value] = name
else:
result[name] = value
return result
|
1ba14774b1ed483f512562ab83f91fab8b843db7
|
nazs/web/core/blocks.py
|
nazs/web/core/blocks.py
|
from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
|
from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
classes='btn btn-sm btn-primary',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
classes='btn btn-sm btn-success',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
classes='btn btn-sm btn-danger',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
|
Add proper css classes to action buttons
|
Add proper css classes to action buttons
|
Python
|
agpl-3.0
|
exekias/droplet,exekias/droplet,exekias/droplet
|
from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
Add proper css classes to action buttons
|
from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
classes='btn btn-sm btn-primary',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
classes='btn btn-sm btn-success',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
classes='btn btn-sm btn-danger',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
|
<commit_before>from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
<commit_msg>Add proper css classes to action buttons<commit_after>
|
from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
classes='btn btn-sm btn-primary',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
classes='btn btn-sm btn-success',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
classes='btn btn-sm btn-danger',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
|
from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
Add proper css classes to action buttonsfrom django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
classes='btn btn-sm btn-primary',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
classes='btn btn-sm btn-success',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
classes='btn btn-sm btn-danger',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
|
<commit_before>from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
<commit_msg>Add proper css classes to action buttons<commit_after>from django.utils.translation import ugettext as _
from achilles import blocks, tables
import nazs
register = blocks.Library('core')
@register.block(template_name='web/core/welcome.html')
def home():
return {'version': nazs.__version__}
@register.block(template_name='web/core/apply_button.html')
def apply_button():
return {'active': nazs.changed()}
@register.block('modules')
class Modules(tables.Table):
id_field = 'name'
# Module name
name = tables.Column(verbose_name=_('Module'))
# Module status
status = tables.MergeColumn(
verbose_name=_('Status'),
columns=(
('install', tables.ActionColumn(verbose_name=_('Install'),
action='core:install_module',
classes='btn btn-sm btn-primary',
visible=lambda m: not m.installed)),
('enable', tables.ActionColumn(verbose_name=_('Enable'),
action='core:enable_module',
classes='btn btn-sm btn-success',
visible=lambda m: m.installed and
not m.enabled)),
('disable', tables.ActionColumn(verbose_name=_('Disable'),
action='core:disable_module',
classes='btn btn-sm btn-danger',
visible=lambda m: m.installed and
m.enabled)),
)
)
def objects(self):
return nazs.modules()
def get_object(self, name):
for module in nazs.modules():
if module.name == name:
return module
raise KeyError('Module %s not found' % name)
|
fc9339307e2e9ef97c59d4512e4de5ff5a43bca0
|
falcon_hateoas/middleware.py
|
falcon_hateoas/middleware.py
|
import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta.__class__):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
Fix comparing with __class__ of class
|
Fix comparing with __class__ of class
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu>
|
Python
|
mit
|
Vnet-as/falcon-hateoas
|
import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta.__class__):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
Fix comparing with __class__ of class
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu>
|
import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
<commit_before>import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta.__class__):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
<commit_msg>Fix comparing with __class__ of class
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu><commit_after>
|
import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta.__class__):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
Fix comparing with __class__ of class
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu>import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
<commit_before>import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta.__class__):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
<commit_msg>Fix comparing with __class__ of class
Signed-off-by: Michal Juranyi <29976087921aeab920eafb9b583221faa738f3f4@vnet.eu><commit_after>import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
|
c0c73dd73f13e8d1d677cc2d7cad5c2f63217751
|
python/tests/test_rmm.py
|
python/tests/test_rmm.py
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm
from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits
_dtypes = [np.int32]
_nelems = [128]
@pytest.fixture(scope="module")
def rmm():
print("initialize librmm")
assert librmm.initialize() == librmm.RMM_SUCCESS
yield librmm
print("finalize librmm")
assert librmm.finalize() == librmm.RMM_SUCCESS
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):
expect_fn = np.add
test_fn = libgdf.gdf_add_generic
#import cffi
#ffi = cffi.FFI()
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from librmm_cffi import librmm as rmm
from .utils import gen_rand
_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
|
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
|
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
|
Python
|
apache-2.0
|
gpuopenanalytics/libgdf,gpuopenanalytics/libgdf,gpuopenanalytics/libgdf,gpuopenanalytics/libgdf
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm
from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits
_dtypes = [np.int32]
_nelems = [128]
@pytest.fixture(scope="module")
def rmm():
print("initialize librmm")
assert librmm.initialize() == librmm.RMM_SUCCESS
yield librmm
print("finalize librmm")
assert librmm.finalize() == librmm.RMM_SUCCESS
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):
expect_fn = np.add
test_fn = libgdf.gdf_add_generic
#import cffi
#ffi = cffi.FFI()
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from librmm_cffi import librmm as rmm
from .utils import gen_rand
_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
|
<commit_before>import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm
from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits
_dtypes = [np.int32]
_nelems = [128]
@pytest.fixture(scope="module")
def rmm():
print("initialize librmm")
assert librmm.initialize() == librmm.RMM_SUCCESS
yield librmm
print("finalize librmm")
assert librmm.finalize() == librmm.RMM_SUCCESS
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):
expect_fn = np.add
test_fn = libgdf.gdf_add_generic
#import cffi
#ffi = cffi.FFI()
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
<commit_msg>Improve librmm python API and convert all pytests to use RMM to create device_arrays.<commit_after>
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from librmm_cffi import librmm as rmm
from .utils import gen_rand
_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
|
import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm
from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits
_dtypes = [np.int32]
_nelems = [128]
@pytest.fixture(scope="module")
def rmm():
print("initialize librmm")
assert librmm.initialize() == librmm.RMM_SUCCESS
yield librmm
print("finalize librmm")
assert librmm.finalize() == librmm.RMM_SUCCESS
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):
expect_fn = np.add
test_fn = libgdf.gdf_add_generic
#import cffi
#ffi = cffi.FFI()
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
Improve librmm python API and convert all pytests to use RMM to create device_arrays.import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from librmm_cffi import librmm as rmm
from .utils import gen_rand
_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
|
<commit_before>import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm
from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits
_dtypes = [np.int32]
_nelems = [128]
@pytest.fixture(scope="module")
def rmm():
print("initialize librmm")
assert librmm.initialize() == librmm.RMM_SUCCESS
yield librmm
print("finalize librmm")
assert librmm.finalize() == librmm.RMM_SUCCESS
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):
expect_fn = np.add
test_fn = libgdf.gdf_add_generic
#import cffi
#ffi = cffi.FFI()
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
<commit_msg>Improve librmm python API and convert all pytests to use RMM to create device_arrays.<commit_after>import pytest
import functools
from itertools import product
import numpy as np
from numba import cuda
from librmm_cffi import librmm as rmm
from .utils import gen_rand
_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
# data
h_in = gen_rand(dtype, nelem)
h_result = gen_rand(dtype, nelem)
d_in = rmm.to_device(h_in)
d_result = rmm.device_array_like(d_in)
d_result.copy_to_device(d_in)
h_result = d_result.copy_to_host()
print('expect')
print(h_in)
print('got')
print(h_result)
np.testing.assert_array_equal(h_result, h_in)
|
46aecccbf543619342dca65a777292ccacbed970
|
src/map_data.py
|
src/map_data.py
|
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from get_data import get_dataset
def draw_map(file, map=None, show=True):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
plt.title(file)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()
|
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from read_headers import variable_names
from get_data import get_dataset
def draw_map(file, map=None, show=True, title=None):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
if title is None: title = '%s: %s' % (file, variable_names[file])
plt.title(title)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()
|
Use human-readable variable name in map figure title.
|
Use human-readable variable name in map figure title.
|
Python
|
mit
|
bendmorris/pybioclim,bendmorris/pybioclim,xguse/pybioclim
|
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from get_data import get_dataset
def draw_map(file, map=None, show=True):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
plt.title(file)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()Use human-readable variable name in map figure title.
|
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from read_headers import variable_names
from get_data import get_dataset
def draw_map(file, map=None, show=True, title=None):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
if title is None: title = '%s: %s' % (file, variable_names[file])
plt.title(title)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()
|
<commit_before>from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from get_data import get_dataset
def draw_map(file, map=None, show=True):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
plt.title(file)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()<commit_msg>Use human-readable variable name in map figure title.<commit_after>
|
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from read_headers import variable_names
from get_data import get_dataset
def draw_map(file, map=None, show=True, title=None):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
if title is None: title = '%s: %s' % (file, variable_names[file])
plt.title(title)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()
|
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from get_data import get_dataset
def draw_map(file, map=None, show=True):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
plt.title(file)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()Use human-readable variable name in map figure title.from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from read_headers import variable_names
from get_data import get_dataset
def draw_map(file, map=None, show=True, title=None):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
if title is None: title = '%s: %s' % (file, variable_names[file])
plt.title(title)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()
|
<commit_before>from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from get_data import get_dataset
def draw_map(file, map=None, show=True):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
plt.title(file)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()<commit_msg>Use human-readable variable name in map figure title.<commit_after>from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
from config import ul, lr
from read_headers import variable_names
from get_data import get_dataset
def draw_map(file, map=None, show=True, title=None):
'''Use Matplotlib's basemap to generate a map of a given BIOCLIM data
file.
You can supply a Basemap object (in any projection) as the optional
keyword argument "map." If none is provided, the default Miller
projection will be used.'''
data = get_dataset(file)
lats = np.linspace(ul[0], lr[0], data.RasterYSize, endpoint=False)
lons = np.linspace(ul[1], lr[1], data.RasterXSize, endpoint=False)
values = data.ReadAsArray()
values = np.ma.masked_where(values==-9999, values)
plt.figure()
if title is None: title = '%s: %s' % (file, variable_names[file])
plt.title(title)
if map is None: map = Basemap(projection='mill',lon_0=0)
map.drawcoastlines(linewidth=1)
map.drawcountries(linewidth=1)
map.drawstates(linewidth=0.5)
x, y = np.meshgrid(lons, lats)
data = np.zeros(x.shape)
map.pcolormesh(x, y, data=values, latlon=True, cmap=plt.cm.OrRd)
cbar = plt.colorbar()
if show: plt.show()
|
1351e9ddd8416d35695a2ebb573fc4570d0efe06
|
coveralls.py
|
coveralls.py
|
#!/bin/env/python
import os
import sys
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
|
#!/bin/env/python
import os
from contextlib import suppress
from distutils.sysconfig import get_python_lib
from subprocess import call
if __name__ == '__main__':
# chdir to the site-packages directory so the report lists relative paths
dot_coverage_path = os.path.join(os.getcwd(), '.coverage')
os.chdir(get_python_lib())
with suppress(OSError):
os.remove('.coverage')
os.symlink(dot_coverage_path, '.coverage')
# create a report from the coverage data
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
else:
rc = call(['coverage', 'report'])
raise SystemExit(rc)
|
Create coverage reports with relative paths
|
Create coverage reports with relative paths
|
Python
|
bsd-2-clause
|
jayvdb/citeproc-py
|
#!/bin/env/python
import os
import sys
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
Create coverage reports with relative paths
|
#!/bin/env/python
import os
from contextlib import suppress
from distutils.sysconfig import get_python_lib
from subprocess import call
if __name__ == '__main__':
# chdir to the site-packages directory so the report lists relative paths
dot_coverage_path = os.path.join(os.getcwd(), '.coverage')
os.chdir(get_python_lib())
with suppress(OSError):
os.remove('.coverage')
os.symlink(dot_coverage_path, '.coverage')
# create a report from the coverage data
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
else:
rc = call(['coverage', 'report'])
raise SystemExit(rc)
|
<commit_before>#!/bin/env/python
import os
import sys
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
<commit_msg>Create coverage reports with relative paths<commit_after>
|
#!/bin/env/python
import os
from contextlib import suppress
from distutils.sysconfig import get_python_lib
from subprocess import call
if __name__ == '__main__':
# chdir to the site-packages directory so the report lists relative paths
dot_coverage_path = os.path.join(os.getcwd(), '.coverage')
os.chdir(get_python_lib())
with suppress(OSError):
os.remove('.coverage')
os.symlink(dot_coverage_path, '.coverage')
# create a report from the coverage data
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
else:
rc = call(['coverage', 'report'])
raise SystemExit(rc)
|
#!/bin/env/python
import os
import sys
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
Create coverage reports with relative paths#!/bin/env/python
import os
from contextlib import suppress
from distutils.sysconfig import get_python_lib
from subprocess import call
if __name__ == '__main__':
# chdir to the site-packages directory so the report lists relative paths
dot_coverage_path = os.path.join(os.getcwd(), '.coverage')
os.chdir(get_python_lib())
with suppress(OSError):
os.remove('.coverage')
os.symlink(dot_coverage_path, '.coverage')
# create a report from the coverage data
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
else:
rc = call(['coverage', 'report'])
raise SystemExit(rc)
|
<commit_before>#!/bin/env/python
import os
import sys
from subprocess import call
if __name__ == '__main__':
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
<commit_msg>Create coverage reports with relative paths<commit_after>#!/bin/env/python
import os
from contextlib import suppress
from distutils.sysconfig import get_python_lib
from subprocess import call
if __name__ == '__main__':
# chdir to the site-packages directory so the report lists relative paths
dot_coverage_path = os.path.join(os.getcwd(), '.coverage')
os.chdir(get_python_lib())
with suppress(OSError):
os.remove('.coverage')
os.symlink(dot_coverage_path, '.coverage')
# create a report from the coverage data
if 'TRAVIS' in os.environ:
rc = call('coveralls')
raise SystemExit(rc)
else:
rc = call(['coverage', 'report'])
raise SystemExit(rc)
|
5a2e58cd42443e74cda3612e05a2c4d82442d0ef
|
main.py
|
main.py
|
from Board import *
from Player import *
def main():
board = Board()
players = (HumanPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
|
from Board import *
from Player import *
def main():
board = Board()
players = (ComputerPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
|
Replace 1st human player with computer player
|
Replace 1st human player with computer player
|
Python
|
mit
|
smpcole/tic-tac-toe
|
from Board import *
from Player import *
def main():
board = Board()
players = (HumanPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
Replace 1st human player with computer player
|
from Board import *
from Player import *
def main():
board = Board()
players = (ComputerPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
|
<commit_before>from Board import *
from Player import *
def main():
board = Board()
players = (HumanPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
<commit_msg>Replace 1st human player with computer player<commit_after>
|
from Board import *
from Player import *
def main():
board = Board()
players = (ComputerPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
|
from Board import *
from Player import *
def main():
board = Board()
players = (HumanPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
Replace 1st human player with computer playerfrom Board import *
from Player import *
def main():
board = Board()
players = (ComputerPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
|
<commit_before>from Board import *
from Player import *
def main():
board = Board()
players = (HumanPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
<commit_msg>Replace 1st human player with computer player<commit_after>from Board import *
from Player import *
def main():
board = Board()
players = (ComputerPlayer('x', board), HumanPlayer('o', board))
turnNum = 0
currentPlayer = None
while not board.endGame():
currentPlayer = players[turnNum % 2]
print "%s's turn" % currentPlayer
currentPlayer.takeTurn()
print board
turnNum += 1
if board.winner != None:
print "%ss win!" % board.winner
else:
print "It's a tie!"
if __name__ == "__main__":
main()
|
4b8387ed44ef0ac78a4d1c9b9a189500ee397f6d
|
main.py
|
main.py
|
import sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
app = Flask(__name__)
@app.route("/")
def hello():
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
return r.text
if __name__ == "__main__":
app.debug = True
app.run()
|
import sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
import json
import random
app = Flask(__name__)
FAKE_DATA = True
HARD_BRAKING = 10
HARD_ACCELERATION = 11
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
behaviourEvent = {
u'engine': [],
u'loc': {
u'latitude': 48.18035,
u'longitude': 11.58489
},
u'car': [],
u'pid': [],
u'journey': [],
u'meta': {
u'asset': u'357322040163096'
},
u'behave': {
u'BEHAVE_ID': -1
},
u'time': {
u'recorded_at': u'2015-03-20T19:04:29Z'
},
u'gps': []
}
@app.route("/")
def hello():
# print payload['filter']
# r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
# print json.loads(r.text)[0]
return "hello"
@app.route("/getBehaviourEvent/current")
def getCurrentBehaviourEvent():
if FAKE_DATA:
hasNewEvent = True if random.randint(1, 5) == 5 else False
if hasNewEvent:
eventType = random.randint(HARD_BRAKING, HARD_ACCELERATION)
behaviourEvent['behave']['BEHAVE_ID'] = eventType
return json.dumps(behaviourEvent)
else:
return json.dumps({})
if __name__ == "__main__":
app.debug = True
app.run()
|
Return fake behaviour data with /getBehaviourEvent/current
|
Return fake behaviour data with /getBehaviourEvent/current
|
Python
|
mit
|
Bensk1/EcoSafeServer,Bensk1/EcoSafeServer,Bensk1/EcoSafeServer
|
import sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
app = Flask(__name__)
@app.route("/")
def hello():
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
return r.text
if __name__ == "__main__":
app.debug = True
app.run()Return fake behaviour data with /getBehaviourEvent/current
|
import sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
import json
import random
app = Flask(__name__)
FAKE_DATA = True
HARD_BRAKING = 10
HARD_ACCELERATION = 11
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
behaviourEvent = {
u'engine': [],
u'loc': {
u'latitude': 48.18035,
u'longitude': 11.58489
},
u'car': [],
u'pid': [],
u'journey': [],
u'meta': {
u'asset': u'357322040163096'
},
u'behave': {
u'BEHAVE_ID': -1
},
u'time': {
u'recorded_at': u'2015-03-20T19:04:29Z'
},
u'gps': []
}
@app.route("/")
def hello():
# print payload['filter']
# r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
# print json.loads(r.text)[0]
return "hello"
@app.route("/getBehaviourEvent/current")
def getCurrentBehaviourEvent():
if FAKE_DATA:
hasNewEvent = True if random.randint(1, 5) == 5 else False
if hasNewEvent:
eventType = random.randint(HARD_BRAKING, HARD_ACCELERATION)
behaviourEvent['behave']['BEHAVE_ID'] = eventType
return json.dumps(behaviourEvent)
else:
return json.dumps({})
if __name__ == "__main__":
app.debug = True
app.run()
|
<commit_before>import sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
app = Flask(__name__)
@app.route("/")
def hello():
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
return r.text
if __name__ == "__main__":
app.debug = True
app.run()<commit_msg>Return fake behaviour data with /getBehaviourEvent/current<commit_after>
|
import sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
import json
import random
app = Flask(__name__)
FAKE_DATA = True
HARD_BRAKING = 10
HARD_ACCELERATION = 11
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
behaviourEvent = {
u'engine': [],
u'loc': {
u'latitude': 48.18035,
u'longitude': 11.58489
},
u'car': [],
u'pid': [],
u'journey': [],
u'meta': {
u'asset': u'357322040163096'
},
u'behave': {
u'BEHAVE_ID': -1
},
u'time': {
u'recorded_at': u'2015-03-20T19:04:29Z'
},
u'gps': []
}
@app.route("/")
def hello():
# print payload['filter']
# r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
# print json.loads(r.text)[0]
return "hello"
@app.route("/getBehaviourEvent/current")
def getCurrentBehaviourEvent():
if FAKE_DATA:
hasNewEvent = True if random.randint(1, 5) == 5 else False
if hasNewEvent:
eventType = random.randint(HARD_BRAKING, HARD_ACCELERATION)
behaviourEvent['behave']['BEHAVE_ID'] = eventType
return json.dumps(behaviourEvent)
else:
return json.dumps({})
if __name__ == "__main__":
app.debug = True
app.run()
|
import sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
app = Flask(__name__)
@app.route("/")
def hello():
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
return r.text
if __name__ == "__main__":
app.debug = True
app.run()Return fake behaviour data with /getBehaviourEvent/currentimport sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
import json
import random
app = Flask(__name__)
FAKE_DATA = True
HARD_BRAKING = 10
HARD_ACCELERATION = 11
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
behaviourEvent = {
u'engine': [],
u'loc': {
u'latitude': 48.18035,
u'longitude': 11.58489
},
u'car': [],
u'pid': [],
u'journey': [],
u'meta': {
u'asset': u'357322040163096'
},
u'behave': {
u'BEHAVE_ID': -1
},
u'time': {
u'recorded_at': u'2015-03-20T19:04:29Z'
},
u'gps': []
}
@app.route("/")
def hello():
# print payload['filter']
# r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
# print json.loads(r.text)[0]
return "hello"
@app.route("/getBehaviourEvent/current")
def getCurrentBehaviourEvent():
if FAKE_DATA:
hasNewEvent = True if random.randint(1, 5) == 5 else False
if hasNewEvent:
eventType = random.randint(HARD_BRAKING, HARD_ACCELERATION)
behaviourEvent['behave']['BEHAVE_ID'] = eventType
return json.dumps(behaviourEvent)
else:
return json.dumps({})
if __name__ == "__main__":
app.debug = True
app.run()
|
<commit_before>import sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
app = Flask(__name__)
@app.route("/")
def hello():
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
return r.text
if __name__ == "__main__":
app.debug = True
app.run()<commit_msg>Return fake behaviour data with /getBehaviourEvent/current<commit_after>import sys
sys.path.append(".env/lib/python2.7/site-packages")
from flask import Flask
import requests
import json
import random
app = Flask(__name__)
FAKE_DATA = True
HARD_BRAKING = 10
HARD_ACCELERATION = 11
payload = {
'access_token': '08beec989bccb333439ee3588583f19f02dd6b7e',
'asset': '357322040163096',
'filter': 'BEHAVE_ID'
}
behaviourEvent = {
u'engine': [],
u'loc': {
u'latitude': 48.18035,
u'longitude': 11.58489
},
u'car': [],
u'pid': [],
u'journey': [],
u'meta': {
u'asset': u'357322040163096'
},
u'behave': {
u'BEHAVE_ID': -1
},
u'time': {
u'recorded_at': u'2015-03-20T19:04:29Z'
},
u'gps': []
}
@app.route("/")
def hello():
# print payload['filter']
# r = requests.post('http://api.mycarcloud.de/resource.php', data=payload)
# print json.loads(r.text)[0]
return "hello"
@app.route("/getBehaviourEvent/current")
def getCurrentBehaviourEvent():
if FAKE_DATA:
hasNewEvent = True if random.randint(1, 5) == 5 else False
if hasNewEvent:
eventType = random.randint(HARD_BRAKING, HARD_ACCELERATION)
behaviourEvent['behave']['BEHAVE_ID'] = eventType
return json.dumps(behaviourEvent)
else:
return json.dumps({})
if __name__ == "__main__":
app.debug = True
app.run()
|
c9ecacdb04f3f8df4f85057ad0d3c69df9481122
|
core/utils/check_sanity.py
|
core/utils/check_sanity.py
|
import os
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
|
import os
from core.utils.Executor import _convert_subprocess_cmd
import subprocess
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
def check_mirage():
try:
subprocess.check_call(
_convert_subprocess_cmd('which mirage')
)
except subprocess.CalledProcessError:
return False
else:
return True
|
Check if mirage is installed
|
Check if mirage is installed
|
Python
|
apache-2.0
|
onyb/dune,adyasha/dune,adyasha/dune,adyasha/dune
|
import os
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
Check if mirage is installed
|
import os
from core.utils.Executor import _convert_subprocess_cmd
import subprocess
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
def check_mirage():
try:
subprocess.check_call(
_convert_subprocess_cmd('which mirage')
)
except subprocess.CalledProcessError:
return False
else:
return True
|
<commit_before>import os
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
<commit_msg>Check if mirage is installed<commit_after>
|
import os
from core.utils.Executor import _convert_subprocess_cmd
import subprocess
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
def check_mirage():
try:
subprocess.check_call(
_convert_subprocess_cmd('which mirage')
)
except subprocess.CalledProcessError:
return False
else:
return True
|
import os
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
Check if mirage is installedimport os
from core.utils.Executor import _convert_subprocess_cmd
import subprocess
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
def check_mirage():
try:
subprocess.check_call(
_convert_subprocess_cmd('which mirage')
)
except subprocess.CalledProcessError:
return False
else:
return True
|
<commit_before>import os
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
<commit_msg>Check if mirage is installed<commit_after>import os
from core.utils.Executor import _convert_subprocess_cmd
import subprocess
from core.exceptions.Exceptions import OPAMConfigurationExeception
def check_environment() -> bool:
__opam_env__ = [
'CAML_LD_LIBRARY_PATH',
'MANPATH',
'PERL5LIB',
'OCAML_TOPLEVEL_PATH',
'PATH'
]
for var in __opam_env__:
if not os.environ.get(var, None):
raise OPAMConfigurationExeception
PATH = os.environ.get('PATH')
for path in PATH.split(':'):
if path.endswith(
os.path.join('.opam', 'system', 'bin')
):
return True
def check_mirage():
try:
subprocess.check_call(
_convert_subprocess_cmd('which mirage')
)
except subprocess.CalledProcessError:
return False
else:
return True
|
f72a40ef9f757d162a54706ff90b3f5cb10452ab
|
csdms/dakota/bmi_dakota.py
|
csdms/dakota/bmi_dakota.py
|
#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
|
#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
self._time = 0.0
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
self._time += self.get_time_step()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
def get_start_time(self):
"""Start time of model."""
return 0.0
def get_end_time(self):
"""End time of model."""
return 1.0
def get_current_time(self):
"""Current time of model."""
return self._time
def get_time_step(self):
"""Time step of model."""
return 1.0
|
Add time methods to BmiDakota
|
Add time methods to BmiDakota
|
Python
|
mit
|
csdms/dakota,csdms/dakota
|
#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
Add time methods to BmiDakota
|
#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
self._time = 0.0
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
self._time += self.get_time_step()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
def get_start_time(self):
"""Start time of model."""
return 0.0
def get_end_time(self):
"""End time of model."""
return 1.0
def get_current_time(self):
"""Current time of model."""
return self._time
def get_time_step(self):
"""Time step of model."""
return 1.0
|
<commit_before>#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
<commit_msg>Add time methods to BmiDakota<commit_after>
|
#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
self._time = 0.0
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
self._time += self.get_time_step()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
def get_start_time(self):
"""Start time of model."""
return 0.0
def get_end_time(self):
"""End time of model."""
return 1.0
def get_current_time(self):
"""Current time of model."""
return self._time
def get_time_step(self):
"""Time step of model."""
return 1.0
|
#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
Add time methods to BmiDakota#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
self._time = 0.0
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
self._time += self.get_time_step()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
def get_start_time(self):
"""Start time of model."""
return 0.0
def get_end_time(self):
"""End time of model."""
return 1.0
def get_current_time(self):
"""Current time of model."""
return self._time
def get_time_step(self):
"""Time step of model."""
return 1.0
|
<commit_before>#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
<commit_msg>Add time methods to BmiDakota<commit_after>#!/usr/bin/env python
"""Basic Model Interface for the Dakota iterative systems analysis toolkit."""
from basic_modeling_interface import Bmi
from .core import Dakota
class BmiDakota(Bmi):
"""Perform a Dakota experiment on a component."""
_name = 'Dakota'
def __init__(self):
"""Create a BmiDakota instance."""
self._model = None
self._time = 0.0
def initialize(self, filename=None):
"""Create a Dakota instance and input file.
Parameters
----------
filename : str, optional
Path to a Dakota configuration file.
"""
if filename is None:
self._model = Dakota()
else:
self._model = Dakota.from_file_like(filename)
self._model.write_input_file()
def update(self):
"""Run Dakota."""
self._model.run()
self._time += self.get_time_step()
def finalize(self):
"""Remove the Dakota instance."""
self._model = None
def get_component_name(self):
"""Name of the component."""
return self._name
def get_start_time(self):
"""Start time of model."""
return 0.0
def get_end_time(self):
"""End time of model."""
return 1.0
def get_current_time(self):
"""Current time of model."""
return self._time
def get_time_step(self):
"""Time step of model."""
return 1.0
|
514e41f8cb3717f3fcd0c1283e60e9f202b79598
|
saddle-points/saddle_points.py
|
saddle-points/saddle_points.py
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == min(row) and col[i] == max(col))
or (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
Correct it to actually follow the README...
|
Correct it to actually follow the README...
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == min(row) and col[i] == max(col))
or (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
Correct it to actually follow the README...
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
<commit_before>def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == min(row) and col[i] == max(col))
or (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
<commit_msg>Correct it to actually follow the README...<commit_after>
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == min(row) and col[i] == max(col))
or (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
Correct it to actually follow the README...def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
<commit_before>def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == min(row) and col[i] == max(col))
or (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
<commit_msg>Correct it to actually follow the README...<commit_after>def saddle_points(m):
mt = transpose(m)
if not m == transpose(mt):
raise ValueError
return set((i, j) for i, row in enumerate(m) for j, col in enumerate(mt)
if (row[j] == max(row) and col[i] == min(col)))
def transpose(m):
return [list(col) for col in zip(*m)]
|
6bc68fa898083a696c931ca4fff82984eeec3131
|
acquisition/tomviz/acquisition/__init__.py
|
acquisition/tomviz/acquisition/__init__.py
|
from abc import abstractmethod, ABCMeta
class AbstractSource(object):
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
pass
@abstractmethod
def preview_scan(self):
pass
@abstractmethod
def stem_acquire(self):
pass
|
from abc import abstractmethod, ABCMeta
class AbstractSource(object):
"""
Abstract interface implemented to define an acquistion source.
"""
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
"""
Set the tilt angle.
:param angle: The title angle to set.
:type angle: int
:returns: The set tilt angle
"""
pass
@abstractmethod
def preview_scan(self):
"""
Peforms a preview scan.
:returns: The 2D tiff generate by the scan
"""
pass
@abstractmethod
def stem_acquire(self):
"""
Peforms STEM acquire
:returns: The 2D tiff generate by the scan
"""
pass
|
Add doc strings to AbstractSource
|
Add doc strings to AbstractSource
|
Python
|
bsd-3-clause
|
mathturtle/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,cjh1/tomviz,OpenChemistry/tomviz,thewtex/tomviz,cryos/tomviz,cjh1/tomviz,cryos/tomviz,OpenChemistry/tomviz,thewtex/tomviz,thewtex/tomviz,cryos/tomviz,OpenChemistry/tomviz,cjh1/tomviz
|
from abc import abstractmethod, ABCMeta
class AbstractSource(object):
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
pass
@abstractmethod
def preview_scan(self):
pass
@abstractmethod
def stem_acquire(self):
pass
Add doc strings to AbstractSource
|
from abc import abstractmethod, ABCMeta
class AbstractSource(object):
"""
Abstract interface implemented to define an acquistion source.
"""
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
"""
Set the tilt angle.
:param angle: The title angle to set.
:type angle: int
:returns: The set tilt angle
"""
pass
@abstractmethod
def preview_scan(self):
"""
Peforms a preview scan.
:returns: The 2D tiff generate by the scan
"""
pass
@abstractmethod
def stem_acquire(self):
"""
Peforms STEM acquire
:returns: The 2D tiff generate by the scan
"""
pass
|
<commit_before>from abc import abstractmethod, ABCMeta
class AbstractSource(object):
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
pass
@abstractmethod
def preview_scan(self):
pass
@abstractmethod
def stem_acquire(self):
pass
<commit_msg>Add doc strings to AbstractSource<commit_after>
|
from abc import abstractmethod, ABCMeta
class AbstractSource(object):
"""
Abstract interface implemented to define an acquistion source.
"""
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
"""
Set the tilt angle.
:param angle: The title angle to set.
:type angle: int
:returns: The set tilt angle
"""
pass
@abstractmethod
def preview_scan(self):
"""
Peforms a preview scan.
:returns: The 2D tiff generate by the scan
"""
pass
@abstractmethod
def stem_acquire(self):
"""
Peforms STEM acquire
:returns: The 2D tiff generate by the scan
"""
pass
|
from abc import abstractmethod, ABCMeta
class AbstractSource(object):
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
pass
@abstractmethod
def preview_scan(self):
pass
@abstractmethod
def stem_acquire(self):
pass
Add doc strings to AbstractSourcefrom abc import abstractmethod, ABCMeta
class AbstractSource(object):
"""
Abstract interface implemented to define an acquistion source.
"""
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
"""
Set the tilt angle.
:param angle: The title angle to set.
:type angle: int
:returns: The set tilt angle
"""
pass
@abstractmethod
def preview_scan(self):
"""
Peforms a preview scan.
:returns: The 2D tiff generate by the scan
"""
pass
@abstractmethod
def stem_acquire(self):
"""
Peforms STEM acquire
:returns: The 2D tiff generate by the scan
"""
pass
|
<commit_before>from abc import abstractmethod, ABCMeta
class AbstractSource(object):
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
pass
@abstractmethod
def preview_scan(self):
pass
@abstractmethod
def stem_acquire(self):
pass
<commit_msg>Add doc strings to AbstractSource<commit_after>from abc import abstractmethod, ABCMeta
class AbstractSource(object):
"""
Abstract interface implemented to define an acquistion source.
"""
__metaclass__ = ABCMeta
@abstractmethod
def set_tilt_angle(self, angle):
"""
Set the tilt angle.
:param angle: The title angle to set.
:type angle: int
:returns: The set tilt angle
"""
pass
@abstractmethod
def preview_scan(self):
"""
Peforms a preview scan.
:returns: The 2D tiff generate by the scan
"""
pass
@abstractmethod
def stem_acquire(self):
"""
Peforms STEM acquire
:returns: The 2D tiff generate by the scan
"""
pass
|
e60550b894e882abb4be0ff8b69b33fd8596c35e
|
docs/conf.py
|
docs/conf.py
|
from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (Path(__file__).parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
|
from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (
Path(__file__).parent.parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
|
Fix the VERSION path for doc
|
Fix the VERSION path for doc
|
Python
|
bsd-3-clause
|
SimonSapin/cairocffi
|
from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (Path(__file__).parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
Fix the VERSION path for doc
|
from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (
Path(__file__).parent.parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
|
<commit_before>from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (Path(__file__).parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
<commit_msg>Fix the VERSION path for doc<commit_after>
|
from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (
Path(__file__).parent.parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
|
from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (Path(__file__).parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
Fix the VERSION path for docfrom pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (
Path(__file__).parent.parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
|
<commit_before>from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (Path(__file__).parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
<commit_msg>Fix the VERSION path for doc<commit_after>from pathlib import Path
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage']
master_doc = 'index'
project = 'cairocffi'
copyright = '2013-2019, Simon Sapin'
release = (
Path(__file__).parent.parent / 'cairocffi' / 'VERSION').read_text().strip()
version = '.'.join(release.split('.')[:2])
exclude_patterns = ['_build']
autodoc_member_order = 'bysource'
autodoc_default_flags = ['members']
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://cairographics.org/documentation/pycairo/2/': None}
|
ce4a588f0104498f5cd2491d85ef39806eb2ba7f
|
tests/filter_integration_tests/test_filters_with_mongo_storage.py
|
tests/filter_integration_tests/test_filters_with_mongo_storage.py
|
from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual(first_response.text, 'Hi')
self.assertEqual(second_response.text, 'Hi, how are you?')
|
from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual('Hi', first_response.text)
self.assertEqual('Hi, how are you?', second_response.text)
|
Put the calculated value on the right
|
Put the calculated value on the right
|
Python
|
bsd-3-clause
|
gunthercox/ChatterBot,vkosuri/ChatterBot
|
from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual(first_response.text, 'Hi')
self.assertEqual(second_response.text, 'Hi, how are you?')
Put the calculated value on the right
|
from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual('Hi', first_response.text)
self.assertEqual('Hi, how are you?', second_response.text)
|
<commit_before>from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual(first_response.text, 'Hi')
self.assertEqual(second_response.text, 'Hi, how are you?')
<commit_msg>Put the calculated value on the right<commit_after>
|
from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual('Hi', first_response.text)
self.assertEqual('Hi, how are you?', second_response.text)
|
from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual(first_response.text, 'Hi')
self.assertEqual(second_response.text, 'Hi, how are you?')
Put the calculated value on the rightfrom tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual('Hi', first_response.text)
self.assertEqual('Hi, how are you?', second_response.text)
|
<commit_before>from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual(first_response.text, 'Hi')
self.assertEqual(second_response.text, 'Hi, how are you?')
<commit_msg>Put the calculated value on the right<commit_after>from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer, **self.get_kwargs())
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual('Hi', first_response.text)
self.assertEqual('Hi, how are you?', second_response.text)
|
7abfa8d52565855cfa1c55c0622b5d599cd04c2f
|
spiff/subscription/management/commands/process_subscriptions.py
|
spiff/subscription/management/commands/process_subscriptions.py
|
from django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
for subscription in data['subscriptions']:
subscription.save()
|
from django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
import stripe
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
invoices = []
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
invoices.append(invoice)
for subscription in data['subscriptions']:
subscription.save()
for invoice in invoices:
invoice.draft = False
invoice.save()
|
Set invoices to non-draft when done
|
Set invoices to non-draft when done
|
Python
|
agpl-3.0
|
SYNHAK/spiff,SYNHAK/spiff,SYNHAK/spiff
|
from django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
for subscription in data['subscriptions']:
subscription.save()
Set invoices to non-draft when done
|
from django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
import stripe
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
invoices = []
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
invoices.append(invoice)
for subscription in data['subscriptions']:
subscription.save()
for invoice in invoices:
invoice.draft = False
invoice.save()
|
<commit_before>from django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
for subscription in data['subscriptions']:
subscription.save()
<commit_msg>Set invoices to non-draft when done<commit_after>
|
from django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
import stripe
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
invoices = []
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
invoices.append(invoice)
for subscription in data['subscriptions']:
subscription.save()
for invoice in invoices:
invoice.draft = False
invoice.save()
|
from django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
for subscription in data['subscriptions']:
subscription.save()
Set invoices to non-draft when donefrom django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
import stripe
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
invoices = []
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
invoices.append(invoice)
for subscription in data['subscriptions']:
subscription.save()
for invoice in invoices:
invoice.draft = False
invoice.save()
|
<commit_before>from django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
for subscription in data['subscriptions']:
subscription.save()
<commit_msg>Set invoices to non-draft when done<commit_after>from django.core.management import BaseCommand
from spiff.payment.models import Invoice
from spiff.subscription.models import SubscriptionPlan
from spiff.api.plugins import find_api_classes
from spiff.membership.utils import monthRange
from spiff.membership.models import Member, RankLineItem
import stripe
class Command(BaseCommand):
help = 'Bills active members for the month'
def handle(self, *args, **options):
startOfMonth, endOfMonth = monthRange()
lineItems = {}
for planCls in find_api_classes('models', SubscriptionPlan):
plans = planCls.objects.all()
for plan in plans:
for subscription in plan.subscriptions.filter(active=True):
if subscription.user not in lineItems:
lineItems[subscription.user] = {'subscriptions': [], 'lineItems': []}
items = plan.process(subscription)
if len(items) > 0 and subscription not in lineItems[subscription.user]['subscriptions']:
lineItems[subscription.user]['subscriptions'].append(subscription)
lineItems[subscription.user]['lineItems'] += items
invoices = []
for user, data in lineItems.iteritems():
invoice = Invoice.bundleLineItems(user, endOfMonth, data['lineItems'])
if invoice:
print "Created invoice", invoice
invoices.append(invoice)
for subscription in data['subscriptions']:
subscription.save()
for invoice in invoices:
invoice.draft = False
invoice.save()
|
f9a4ae44f33279632396716fbd808e80773f0a71
|
widelanguagedemo/assets.py
|
widelanguagedemo/assets.py
|
# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='jsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
|
# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='rjsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
|
Work around a js minification bug.
|
Work around a js minification bug.
|
Python
|
bsd-3-clause
|
larsyencken/wide-language-demo,larsyencken/wide-language-demo,larsyencken/wide-language-demo,larsyencken/wide-language-demo,larsyencken/wide-language-demo
|
# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='jsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
Work around a js minification bug.
|
# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='rjsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
|
<commit_before># -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='jsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
<commit_msg>Work around a js minification bug.<commit_after>
|
# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='rjsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
|
# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='jsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
Work around a js minification bug.# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='rjsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
|
<commit_before># -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='jsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
<commit_msg>Work around a js minification bug.<commit_after># -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='rjsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
|
2621e71926942113e8c9c85fe48d7448a790f916
|
bluebottle/bb_organizations/serializers.py
|
bluebottle/bb_organizations/serializers.py
|
from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email', 'person')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
|
from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
|
Remove person from organization serializer
|
Remove person from organization serializer
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle
|
from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email', 'person')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
Remove person from organization serializer
|
from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
|
<commit_before>from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email', 'person')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
<commit_msg>Remove person from organization serializer<commit_after>
|
from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
|
from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email', 'person')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
Remove person from organization serializerfrom rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
|
<commit_before>from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email', 'person')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
<commit_msg>Remove person from organization serializer<commit_after>from rest_framework import serializers
from bluebottle.utils.model_dispatcher import get_organization_model, get_organizationmember_model
ORGANIZATION_MODEL = get_organization_model()
MEMBER_MODEL = get_organizationmember_model()
ORGANIZATION_FIELDS = ( 'id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'email')
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS
class ManageOrganizationSerializer(OrganizationSerializer):
slug = serializers.SlugField(required=False)
name = serializers.CharField(required=True)
email = serializers.EmailField(required=False)
class Meta:
model = ORGANIZATION_MODEL
fields = ORGANIZATION_FIELDS + ('partner_organizations',
'created', 'updated')
|
a8066d91dc0d1e37514de0623bf382b55abcf4c7
|
esridump/cli.py
|
esridump/cli.py
|
import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if not args.jsonlines:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
if not args.jsonlines:
args.outfile.write(',')
args.outfile.write('\n')
if not args.jsonlines:
# args.outfile.seek(-2)
args.outfile.write(']}')
if __name__ == '__main__':
main()
|
import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if args.jsonlines:
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
args.outfile.write('\n')
else:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
feature_iter = dumper.iter()
try:
feature = feature_iter.next()
while True:
args.outfile.write(json.dumps(feature))
feature = feature_iter.next()
args.outfile.write(',\n')
except StopIteration:
args.outfile.write('\n')
args.outfile.write(']}')
if __name__ == '__main__':
main()
|
Remove the extra comma at the end.
|
Remove the extra comma at the end.
Fixes #7
|
Python
|
mit
|
iandees/esri-dump,openaddresses/pyesridump
|
import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if not args.jsonlines:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
if not args.jsonlines:
args.outfile.write(',')
args.outfile.write('\n')
if not args.jsonlines:
# args.outfile.seek(-2)
args.outfile.write(']}')
if __name__ == '__main__':
main()
Remove the extra comma at the end.
Fixes #7
|
import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if args.jsonlines:
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
args.outfile.write('\n')
else:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
feature_iter = dumper.iter()
try:
feature = feature_iter.next()
while True:
args.outfile.write(json.dumps(feature))
feature = feature_iter.next()
args.outfile.write(',\n')
except StopIteration:
args.outfile.write('\n')
args.outfile.write(']}')
if __name__ == '__main__':
main()
|
<commit_before>import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if not args.jsonlines:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
if not args.jsonlines:
args.outfile.write(',')
args.outfile.write('\n')
if not args.jsonlines:
# args.outfile.seek(-2)
args.outfile.write(']}')
if __name__ == '__main__':
main()
<commit_msg>Remove the extra comma at the end.
Fixes #7<commit_after>
|
import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if args.jsonlines:
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
args.outfile.write('\n')
else:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
feature_iter = dumper.iter()
try:
feature = feature_iter.next()
while True:
args.outfile.write(json.dumps(feature))
feature = feature_iter.next()
args.outfile.write(',\n')
except StopIteration:
args.outfile.write('\n')
args.outfile.write(']}')
if __name__ == '__main__':
main()
|
import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if not args.jsonlines:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
if not args.jsonlines:
args.outfile.write(',')
args.outfile.write('\n')
if not args.jsonlines:
# args.outfile.seek(-2)
args.outfile.write(']}')
if __name__ == '__main__':
main()
Remove the extra comma at the end.
Fixes #7import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if args.jsonlines:
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
args.outfile.write('\n')
else:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
feature_iter = dumper.iter()
try:
feature = feature_iter.next()
while True:
args.outfile.write(json.dumps(feature))
feature = feature_iter.next()
args.outfile.write(',\n')
except StopIteration:
args.outfile.write('\n')
args.outfile.write(']}')
if __name__ == '__main__':
main()
|
<commit_before>import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if not args.jsonlines:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
if not args.jsonlines:
args.outfile.write(',')
args.outfile.write('\n')
if not args.jsonlines:
# args.outfile.seek(-2)
args.outfile.write(']}')
if __name__ == '__main__':
main()
<commit_msg>Remove the extra comma at the end.
Fixes #7<commit_after>import argparse
import simplejson as json
from esridump.dumper import EsriDumper
def main():
parser = argparse.ArgumentParser()
parser.add_argument("url")
parser.add_argument("outfile", type=argparse.FileType('w'))
parser.add_argument("--jsonlines", action='store_true', default=False)
args = parser.parse_args()
dumper = EsriDumper(args.url)
if args.jsonlines:
for feature in dumper.iter():
args.outfile.write(json.dumps(feature))
args.outfile.write('\n')
else:
args.outfile.write('{"type":"FeatureCollection","features":[\n')
feature_iter = dumper.iter()
try:
feature = feature_iter.next()
while True:
args.outfile.write(json.dumps(feature))
feature = feature_iter.next()
args.outfile.write(',\n')
except StopIteration:
args.outfile.write('\n')
args.outfile.write(']}')
if __name__ == '__main__':
main()
|
eb0714767cf5c0fd89ff4e50e22445a5e436f94c
|
iopath/tabular/tabular_io.py
|
iopath/tabular/tabular_io.py
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any, Iterable
from iopath.common.file_io import PathHandler
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> Iterable[Any]:
assert mode == "r"
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any
from iopath.common.file_io import PathHandler, TabularIO
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> TabularIO:
assert mode == "r"
|
Update type signature of AIRStorePathHandler.opent()
|
Update type signature of AIRStorePathHandler.opent()
Summary:
The previous diff updated the type signature of the
`PathHandler.opent()` method to return a custom context manager. Here,
we update the return type of the overriden `AIRStorePathHandler.opent()`
method to return an implementation of the `PathHandlerContext` protocol,
namely the `AIRStoreRowDataLoader` instead of `Iterable[Any]` to allow
Pyre to carry out static type checking.
Reviewed By: mackorone
Differential Revision: D33833561
fbshipit-source-id: f642110645b147a955f4375fc24d4c29cdca6f26
|
Python
|
mit
|
facebookresearch/iopath,facebookresearch/iopath
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any, Iterable
from iopath.common.file_io import PathHandler
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> Iterable[Any]:
assert mode == "r"
Update type signature of AIRStorePathHandler.opent()
Summary:
The previous diff updated the type signature of the
`PathHandler.opent()` method to return a custom context manager. Here,
we update the return type of the overriden `AIRStorePathHandler.opent()`
method to return an implementation of the `PathHandlerContext` protocol,
namely the `AIRStoreRowDataLoader` instead of `Iterable[Any]` to allow
Pyre to carry out static type checking.
Reviewed By: mackorone
Differential Revision: D33833561
fbshipit-source-id: f642110645b147a955f4375fc24d4c29cdca6f26
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any
from iopath.common.file_io import PathHandler, TabularIO
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> TabularIO:
assert mode == "r"
|
<commit_before># Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any, Iterable
from iopath.common.file_io import PathHandler
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> Iterable[Any]:
assert mode == "r"
<commit_msg>Update type signature of AIRStorePathHandler.opent()
Summary:
The previous diff updated the type signature of the
`PathHandler.opent()` method to return a custom context manager. Here,
we update the return type of the overriden `AIRStorePathHandler.opent()`
method to return an implementation of the `PathHandlerContext` protocol,
namely the `AIRStoreRowDataLoader` instead of `Iterable[Any]` to allow
Pyre to carry out static type checking.
Reviewed By: mackorone
Differential Revision: D33833561
fbshipit-source-id: f642110645b147a955f4375fc24d4c29cdca6f26<commit_after>
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any
from iopath.common.file_io import PathHandler, TabularIO
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> TabularIO:
assert mode == "r"
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any, Iterable
from iopath.common.file_io import PathHandler
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> Iterable[Any]:
assert mode == "r"
Update type signature of AIRStorePathHandler.opent()
Summary:
The previous diff updated the type signature of the
`PathHandler.opent()` method to return a custom context manager. Here,
we update the return type of the overriden `AIRStorePathHandler.opent()`
method to return an implementation of the `PathHandlerContext` protocol,
namely the `AIRStoreRowDataLoader` instead of `Iterable[Any]` to allow
Pyre to carry out static type checking.
Reviewed By: mackorone
Differential Revision: D33833561
fbshipit-source-id: f642110645b147a955f4375fc24d4c29cdca6f26# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any
from iopath.common.file_io import PathHandler, TabularIO
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> TabularIO:
assert mode == "r"
|
<commit_before># Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any, Iterable
from iopath.common.file_io import PathHandler
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> Iterable[Any]:
assert mode == "r"
<commit_msg>Update type signature of AIRStorePathHandler.opent()
Summary:
The previous diff updated the type signature of the
`PathHandler.opent()` method to return a custom context manager. Here,
we update the return type of the overriden `AIRStorePathHandler.opent()`
method to return an implementation of the `PathHandlerContext` protocol,
namely the `AIRStoreRowDataLoader` instead of `Iterable[Any]` to allow
Pyre to carry out static type checking.
Reviewed By: mackorone
Differential Revision: D33833561
fbshipit-source-id: f642110645b147a955f4375fc24d4c29cdca6f26<commit_after># Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any
from iopath.common.file_io import PathHandler, TabularIO
class TabularUriParser:
def parse_uri(self, uri: str) -> None:
pass
class TabularPathHandler(PathHandler):
def _opent(
self, path: str, mode: str = "r", buffering: int = 32, **kwargs: Any
) -> TabularIO:
assert mode == "r"
|
70da8399e85c8b264663b1807b51a8eda05d7f19
|
plugin/src/py/android_screenshot_tests/simple_puller.py
|
plugin/src/py/android_screenshot_tests/simple_puller.py
|
#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"test -e %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
|
#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"ls %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
|
Use ls instead of test
|
Use ls instead of test
Older versions of Android do not ship with test, and so pulling
screenshots always fails. This changes the method for testing
the existence of metadata.xml to use ls instead, which is
available on all Android versions.
|
Python
|
apache-2.0
|
facebook/screenshot-tests-for-android,facebook/screenshot-tests-for-android,facebook/screenshot-tests-for-android,facebook/screenshot-tests-for-android
|
#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"test -e %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
Use ls instead of test
Older versions of Android do not ship with test, and so pulling
screenshots always fails. This changes the method for testing
the existence of metadata.xml to use ls instead, which is
available on all Android versions.
|
#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"ls %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
|
<commit_before>#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"test -e %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
<commit_msg>Use ls instead of test
Older versions of Android do not ship with test, and so pulling
screenshots always fails. This changes the method for testing
the existence of metadata.xml to use ls instead, which is
available on all Android versions.<commit_after>
|
#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"ls %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
|
#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"test -e %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
Use ls instead of test
Older versions of Android do not ship with test, and so pulling
screenshots always fails. This changes the method for testing
the existence of metadata.xml to use ls instead, which is
available on all Android versions.#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"ls %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
|
<commit_before>#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"test -e %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
<commit_msg>Use ls instead of test
Older versions of Android do not ship with test, and so pulling
screenshots always fails. This changes the method for testing
the existence of metadata.xml to use ls instead, which is
available on all Android versions.<commit_after>#!/usr/bin/env python
#
# Copyright (c) 2014-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import subprocess
from . import common
from .common import get_adb
class SimplePuller:
"""Pulls a given file from the device"""
def __init__(self, adb_args=[]):
self._adb_args = list(adb_args)
def remote_file_exists(self, src):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell",
"ls %s && echo EXISTS || echo DOES_NOT_EXIST" % src])
return "EXISTS" in output
def pull(self, src, dest):
subprocess.check_call(
[get_adb()] + self._adb_args + ["pull", src, dest],
stderr=subprocess.STDOUT)
def get_external_data_dir(self):
output = common.check_output(
[get_adb()] + self._adb_args + ["shell", "echo", "$EXTERNAL_STORAGE"])
return output.strip().split()[-1]
|
6aad731cd3808e784530e8632cf778c2b9e19543
|
gcsa/util/date_time_util.py
|
gcsa/util/date_time_util.py
|
from datetime import datetime, timedelta, date
import pytz
from tzlocal import get_localzone
def get_utc_datetime(dt, *args, **kwargs):
if isinstance(dt, datetime):
return dt.isoformat()
else:
return datetime(dt, *args, **kwargs).isoformat()
def date_range(start_date, day_count):
for n in range(day_count):
yield start_date + timedelta(n)
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
|
from datetime import datetime, date
import pytz
from tzlocal import get_localzone
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
|
Remove unused functions from util
|
Remove unused functions from util
|
Python
|
mit
|
kuzmoyev/Google-Calendar-Simple-API
|
from datetime import datetime, timedelta, date
import pytz
from tzlocal import get_localzone
def get_utc_datetime(dt, *args, **kwargs):
if isinstance(dt, datetime):
return dt.isoformat()
else:
return datetime(dt, *args, **kwargs).isoformat()
def date_range(start_date, day_count):
for n in range(day_count):
yield start_date + timedelta(n)
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
Remove unused functions from util
|
from datetime import datetime, date
import pytz
from tzlocal import get_localzone
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
|
<commit_before>from datetime import datetime, timedelta, date
import pytz
from tzlocal import get_localzone
def get_utc_datetime(dt, *args, **kwargs):
if isinstance(dt, datetime):
return dt.isoformat()
else:
return datetime(dt, *args, **kwargs).isoformat()
def date_range(start_date, day_count):
for n in range(day_count):
yield start_date + timedelta(n)
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
<commit_msg>Remove unused functions from util<commit_after>
|
from datetime import datetime, date
import pytz
from tzlocal import get_localzone
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
|
from datetime import datetime, timedelta, date
import pytz
from tzlocal import get_localzone
def get_utc_datetime(dt, *args, **kwargs):
if isinstance(dt, datetime):
return dt.isoformat()
else:
return datetime(dt, *args, **kwargs).isoformat()
def date_range(start_date, day_count):
for n in range(day_count):
yield start_date + timedelta(n)
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
Remove unused functions from utilfrom datetime import datetime, date
import pytz
from tzlocal import get_localzone
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
|
<commit_before>from datetime import datetime, timedelta, date
import pytz
from tzlocal import get_localzone
def get_utc_datetime(dt, *args, **kwargs):
if isinstance(dt, datetime):
return dt.isoformat()
else:
return datetime(dt, *args, **kwargs).isoformat()
def date_range(start_date, day_count):
for n in range(day_count):
yield start_date + timedelta(n)
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
<commit_msg>Remove unused functions from util<commit_after>from datetime import datetime, date
import pytz
from tzlocal import get_localzone
def insure_localisation(dt, timezone=str(get_localzone())):
"""Insures localisation with provided timezone on "datetime" object.
Does nothing to object of type "date"."""
if isinstance(dt, datetime):
tz = pytz.timezone(timezone)
if dt.tzinfo is None:
dt = tz.localize(dt)
return dt
elif isinstance(dt, date):
return dt
else:
raise TypeError('"date" or "datetime" object expected, not {!r}.'.format(dt.__class__.__name__))
|
a95f6806ab4e591cfb404624631306932fd69e85
|
ninja/__init__.py
|
ninja/__init__.py
|
import os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
if platform.system().lower() == "darwin":
DATA = os.path.join(DATA, 'CMake.app', 'Contents')
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
|
import os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
|
Fix lookup of ninja executable on MacOSX
|
ninja: Fix lookup of ninja executable on MacOSX
|
Python
|
apache-2.0
|
scikit-build/ninja-python-distributions
|
import os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
if platform.system().lower() == "darwin":
DATA = os.path.join(DATA, 'CMake.app', 'Contents')
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
ninja: Fix lookup of ninja executable on MacOSX
|
import os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
|
<commit_before>import os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
if platform.system().lower() == "darwin":
DATA = os.path.join(DATA, 'CMake.app', 'Contents')
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
<commit_msg>ninja: Fix lookup of ninja executable on MacOSX<commit_after>
|
import os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
|
import os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
if platform.system().lower() == "darwin":
DATA = os.path.join(DATA, 'CMake.app', 'Contents')
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
ninja: Fix lookup of ninja executable on MacOSXimport os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
|
<commit_before>import os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
if platform.system().lower() == "darwin":
DATA = os.path.join(DATA, 'CMake.app', 'Contents')
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
<commit_msg>ninja: Fix lookup of ninja executable on MacOSX<commit_after>import os
import platform
import subprocess
import sys
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
DATA = os.path.join(os.path.dirname(__file__), 'data')
# Support running tests from the source tree
if not os.path.exists(DATA):
_data = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../_skbuild/cmake-install/ninja/data'))
if os.path.exists(_data):
DATA = _data
BIN_DIR = os.path.join(DATA, 'bin')
def _program(name, args):
return subprocess.call([os.path.join(BIN_DIR, name)] + args)
def ninja():
raise SystemExit(_program('ninja', sys.argv[1:]))
|
3d34983c5176e744b876563ec9ea4ae8e2d57d17
|
datapipe/targets/objects.py
|
datapipe/targets/objects.py
|
from ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
|
from ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if mem and 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
|
Add missing check for None
|
Add missing check for None
|
Python
|
mit
|
ibab/datapipe
|
from ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
Add missing check for None
|
from ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if mem and 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
|
<commit_before>from ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
<commit_msg>Add missing check for None<commit_after>
|
from ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if mem and 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
|
from ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
Add missing check for Nonefrom ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if mem and 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
|
<commit_before>from ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
<commit_msg>Add missing check for None<commit_after>from ..target import Target
import hashlib
import dill
import joblib
class PyTarget(Target):
def __init__(self, name, obj=None):
self._name = name
self._obj = obj
super(PyTarget, self).__init__()
if not obj is None:
self.set(obj)
def identifier(self):
return self._name
def get(self):
return self._obj
def set(self, obj):
self._obj = obj
self._memory['obj'] = dill.dumps(obj).encode('base64')
def is_damaged(self):
mem = self.stored()
if mem and 'obj' in mem:
if self._obj is None:
self._memory['obj'] = mem['obj']
self._obj = dill.loads(mem['obj'].decode('base64'))
return self._obj is None
else:
return joblib.hash(self._obj) != joblib.hash(dill.loads(mem['obj'].decode('base64')))
else:
return self._obj is None
|
db9afab144c12391c9c54174b8973ec187455b9c
|
webpack/conf.py
|
webpack/conf.py
|
import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = True # TODO: should default to False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
|
import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
|
WATCH now defaults to False
|
WATCH now defaults to False
|
Python
|
mit
|
markfinger/python-webpack,markfinger/python-webpack
|
import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = True # TODO: should default to False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
WATCH now defaults to False
|
import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
|
<commit_before>import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = True # TODO: should default to False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
<commit_msg>WATCH now defaults to False<commit_after>
|
import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
|
import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = True # TODO: should default to False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
WATCH now defaults to Falseimport os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
|
<commit_before>import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = True # TODO: should default to False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
<commit_msg>WATCH now defaults to False<commit_after>import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
|
460c2f02a32e71ffa8332ad156d505a5ab15be1c
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version='1.6.14-1', # version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
Change pypi version to 1.6.14-1
|
Change pypi version to 1.6.14-1
|
Python
|
apache-2.0
|
Cal-CS-61A-Staff/ok-client
|
from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Change pypi version to 1.6.14-1
|
from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version='1.6.14-1', # version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Change pypi version to 1.6.14-1<commit_after>
|
from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version='1.6.14-1', # version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Change pypi version to 1.6.14-1from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version='1.6.14-1', # version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Change pypi version to 1.6.14-1<commit_after>from setuptools import setup, find_packages
import client
VERSION = client.__version__
setup(
name='okpy',
version='1.6.14-1', # version=VERSION,
author='John Denero, Soumya Basu, Stephen Martinis, Sharad Vikram, Albert Wu',
# author_email='',
description=('ok.py supports programming projects by running tests, '
'tracking progress, and assisting in debugging.'),
# long_description=long_description,
url='https://github.com/Cal-CS-61A-Staff/ok-client',
# download_url='https://github.com/Cal-CS-61A-Staff/ok/releases/download/v{}/ok'.format(VERSION),
license='Apache License, Version 2.0',
keywords=['education', 'autograding'],
packages=find_packages(include=[
'client',
'client.*',
]),
package_data={
'client': ['config.ok'],
},
# install_requires=[],
entry_points={
'console_scripts': [
'ok=client.cli.ok:main',
'ok-publish=client.cli.publish:main',
'ok-lock=client.cli.lock:main',
'ok-test=client.cli.test:main',
],
},
classifiers=[
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
7c506db388dc38ce024fbcd1de12071a0ea2a009
|
setup.py
|
setup.py
|
from setuptools import setup
requirements = [
'pyqt5'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
from setuptools import setup
requirements = [
'pyqt5',
'qscintilla'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
Add 'qscintilla' to the program requirements
|
Add 'qscintilla' to the program requirements
|
Python
|
mit
|
msklosak/EasyEdit
|
from setuptools import setup
requirements = [
'pyqt5'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
Add 'qscintilla' to the program requirements
|
from setuptools import setup
requirements = [
'pyqt5',
'qscintilla'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
<commit_before>from setuptools import setup
requirements = [
'pyqt5'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
<commit_msg>Add 'qscintilla' to the program requirements<commit_after>
|
from setuptools import setup
requirements = [
'pyqt5',
'qscintilla'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
from setuptools import setup
requirements = [
'pyqt5'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
Add 'qscintilla' to the program requirementsfrom setuptools import setup
requirements = [
'pyqt5',
'qscintilla'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
<commit_before>from setuptools import setup
requirements = [
'pyqt5'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
<commit_msg>Add 'qscintilla' to the program requirements<commit_after>from setuptools import setup
requirements = [
'pyqt5',
'qscintilla'
]
test_requirements = [
'pytest',
'pytest-cov',
'pytest-faulthandler',
'pytest-mock',
'pytest-qt',
'pytest-xvfb',
]
setup(
name='EasyEdit',
version='0.0.1',
description="A PyQt5 cross-platform text editor",
author="Matthew S. Klosak",
author_email='msklosak@gmail.com',
url='https://github.com/msklosak/EasyEdit',
packages=['easyedit', 'tests'],
entry_points={
'console_scripts': [
'EasyEdit=easyedit.editor:main'
]
},
install_requires=requirements,
zip_safe=False,
keywords='EasyEdit',
classifiers=[
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
2e2b020537ca2e1c1ba0294413066d3ac293fd6c
|
setup.py
|
setup.py
|
import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.14.post1",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
|
import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.15",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
|
Upgrade version to 0.0.15 (remove numpy dep)
|
Upgrade version to 0.0.15 (remove numpy dep)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com>
|
Python
|
mit
|
lebrice/SimpleParsing
|
import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.14.post1",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
Upgrade version to 0.0.15 (remove numpy dep)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com>
|
import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.15",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
|
<commit_before>import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.14.post1",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
<commit_msg>Upgrade version to 0.0.15 (remove numpy dep)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com><commit_after>
|
import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.15",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
|
import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.14.post1",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
Upgrade version to 0.0.15 (remove numpy dep)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com>import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.15",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
|
<commit_before>import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.14.post1",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
<commit_msg>Upgrade version to 0.0.15 (remove numpy dep)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com><commit_after>import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.15",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
],
)
|
cb048cc483754b003d70844ae99a4c512d35d2ee
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
author='John Wiseman',
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
|
Fix for missing author info
|
Fix for missing author info
|
Python
|
mit
|
jefftriplett/rid.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
author='John Wiseman',
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
Fix for missing author info
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
author='John Wiseman',
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
<commit_msg>Fix for missing author info<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
author='John Wiseman',
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
Fix for missing author info#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
author='John Wiseman',
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
<commit_msg>Fix for missing author info<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
rid
~~~~~~~~~~~
"The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis
that is designed to measure 'primordial' and conceptual content. Primordial
thought is the kind of free-form, associative thinking involved in fantasy
and dreams. Like Freud's id, I guess. Conceptual (or secondary) thought is
logical, reality-based and focused on problem solving."
via: John Wiseman (http://lemonodor.com/archives/001511.html)
"""
from setuptools import setup, find_packages
setup(
name='regressive-imagery-dictionary',
version='0.1.7',
url='https://github.com/jefftriplett/rid.py',
license='MIT',
description='The Regressive Imagery Dictionary (RID) is a coding scheme for text analysis that is designed to measure "primordial" and conceptual content.',
long_description=__doc__,
maintainer='Jeff Triplett',
maintainer_email='jeff.triplett@gmail.com',
packages=find_packages(),
package_data={},
py_modules=['rid'],
entry_points={
'console_scripts': [
'rid = rid:main',
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
)
|
46816c4d8470192e76e730969ddcedeb8391fdcf
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
download_url = 'https://github.com/bjlange/neighborhoodize/tarball/0.9',
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
|
Add download url for pypi
|
Add download url for pypi
|
Python
|
mit
|
bjlange/neighborhoodize
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
Add download url for pypi
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
download_url = 'https://github.com/bjlange/neighborhoodize/tarball/0.9',
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
<commit_msg>Add download url for pypi<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
download_url = 'https://github.com/bjlange/neighborhoodize/tarball/0.9',
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
Add download url for pypi#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
download_url = 'https://github.com/bjlange/neighborhoodize/tarball/0.9',
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
<commit_msg>Add download url for pypi<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(name="Neighborhoodize",
version='0.9',
description='Utility for translating lat, long coordinates into '
'neighborhoods in various cities',
author='Brian Lange',
author_email='brian.lange@datascopeanalytics.com',
url='https://github.com/bjlange/neighborhoodize',
packages=['neighborhoodize', ],
package_data={'neighborhoodize': ['data/*']},
download_url = 'https://github.com/bjlange/neighborhoodize/tarball/0.9',
install_requires=[
"Shapely >= 1.5.7",
"beautifulsoup4 >= 4.3.2",
"fastkml >= 0.9"],
)
|
f50d192b9384664ded37326ff48a1351843f76c6
|
setup.py
|
setup.py
|
try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
|
try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
def get_long_description():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
long_description=get_long_description(),
long_description_content_type='text/markdown',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
|
Add long description to package.
|
Add long description to package.
|
Python
|
bsd-3-clause
|
socialwifi/jsonapi-requests
|
try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
Add long description to package.
|
try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
def get_long_description():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
long_description=get_long_description(),
long_description_content_type='text/markdown',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
|
<commit_before>try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
<commit_msg>Add long description to package.<commit_after>
|
try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
def get_long_description():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
long_description=get_long_description(),
long_description_content_type='text/markdown',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
|
try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
Add long description to package.try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
def get_long_description():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
long_description=get_long_description(),
long_description_content_type='text/markdown',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
|
<commit_before>try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
<commit_msg>Add long description to package.<commit_after>try:
from pip._internal.req import parse_requirements
except ImportError:
from pip.req import parse_requirements
from setuptools import find_packages
from setuptools import setup
def get_long_description():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='jsonapi-requests',
version='0.6.1.dev0',
description='Python client implementation for json api. http://jsonapi.org/',
long_description=get_long_description(),
long_description_content_type='text/markdown',
author='Social WiFi',
author_email='it@socialwifi.com',
url='https://github.com/socialwifi/jsonapi-requests',
packages=find_packages(exclude=['tests']),
install_requires=[str(ir.req) for ir in parse_requirements('base_requirements.txt', session=False)],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'flask'],
extras_require={
'flask': ['flask']
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)
|
77625dda88fb4278d7c82b546727f62aae1d5939
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
Change maintainer to Blanc Ltd
|
Change maintainer to Blanc Ltd
|
Python
|
bsd-3-clause
|
blancltd/django-latest-tweets
|
#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
Change maintainer to Blanc Ltd
|
#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
<commit_msg>Change maintainer to Blanc Ltd<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
Change maintainer to Blanc Ltd#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
<commit_msg>Change maintainer to Blanc Ltd<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
# Use latest_tweets.VERSION for version numbers
version_tuple = __import__('latest_tweets').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-latest-tweets',
version=version,
description='Latest Tweets for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
34dc1dcfd843f62d6b38b69e9cd465c16eac70c8
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.32',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/agushuley/gu-django-filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.33',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/hu-django/filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
Migrate to python 3, django 3.2
|
Migrate to python 3, django 3.2
|
Python
|
bsd-3-clause
|
agushuley/gu-django-filebrowser-no-grappelli,agushuley/gu-django-filebrowser-no-grappelli,agushuley/gu-django-filebrowser-no-grappelli,agushuley/gu-django-filebrowser-no-grappelli
|
from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.32',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/agushuley/gu-django-filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
Migrate to python 3, django 3.2
|
from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.33',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/hu-django/filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.32',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/agushuley/gu-django-filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
<commit_msg>Migrate to python 3, django 3.2<commit_after>
|
from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.33',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/hu-django/filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.32',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/agushuley/gu-django-filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
Migrate to python 3, django 3.2from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.33',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/hu-django/filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.32',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/agushuley/gu-django-filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
<commit_msg>Migrate to python 3, django 3.2<commit_after>from setuptools import setup, find_packages
setup(
name='gu-django-filebrowser-no-grappelli',
version='3.1.33',
description='Media-Management with the Django Admin-Interface. Without django-grappelli requirement.',
author='Patrick Kranzlmueller',
author_email='patrick@vonautomatisch.at',
url='https://github.com/hu-django/filebrowser-no-grappelli',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
efebca50447d4665e6511b461216f30887242261
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.26',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
|
from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.27',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
|
Update requests requirement from <2.26,>=2.4.2 to >=2.4.2,<2.27
|
Update requests requirement from <2.26,>=2.4.2 to >=2.4.2,<2.27
Updates the requirements on [requests](https://github.com/psf/requests) to permit the latest version.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.4.2...v2.26.0)
---
updated-dependencies:
- dependency-name: requests
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>
|
Python
|
apache-2.0
|
zooniverse/panoptes-python-client
|
from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.26',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
Update requests requirement from <2.26,>=2.4.2 to >=2.4.2,<2.27
Updates the requirements on [requests](https://github.com/psf/requests) to permit the latest version.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.4.2...v2.26.0)
---
updated-dependencies:
- dependency-name: requests
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>
|
from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.27',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.26',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
<commit_msg>Update requests requirement from <2.26,>=2.4.2 to >=2.4.2,<2.27
Updates the requirements on [requests](https://github.com/psf/requests) to permit the latest version.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.4.2...v2.26.0)
---
updated-dependencies:
- dependency-name: requests
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com><commit_after>
|
from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.27',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
|
from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.26',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
Update requests requirement from <2.26,>=2.4.2 to >=2.4.2,<2.27
Updates the requirements on [requests](https://github.com/psf/requests) to permit the latest version.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.4.2...v2.26.0)
---
updated-dependencies:
- dependency-name: requests
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.27',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.26',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
<commit_msg>Update requests requirement from <2.26,>=2.4.2 to >=2.4.2,<2.27
Updates the requirements on [requests](https://github.com/psf/requests) to permit the latest version.
- [Release notes](https://github.com/psf/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/psf/requests/compare/v2.4.2...v2.26.0)
---
updated-dependencies:
- dependency-name: requests
dependency-type: direct:production
...
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com><commit_after>from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='adam@zooniverse.org',
version='1.4.0',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.27',
'future>=0.16,<0.19',
'python-magic>=0.4,<0.5',
'redo>=1.7',
'six>=1.9',
],
extras_require={
'testing': [
'mock>=2.0,<4.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
|
4d5942af14f842742174e588aef1d9526cf368ae
|
setup.py
|
setup.py
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
license='BSD',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
|
Add license to the packaging
|
Add license to the packaging
|
Python
|
bsd-2-clause
|
wagigi/fabtools-python,davidcaste/fabtools,prologic/fabtools,n0n0x/fabtools-python,bitmonk/fabtools,ronnix/fabtools,pahaz/fabtools,AMOSoft/fabtools,fabtools/fabtools,sociateru/fabtools,ahnjungho/fabtools,pombredanne/fabtools,badele/fabtools,hagai26/fabtools
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
Add license to the packaging
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
license='BSD',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
|
<commit_before>try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
<commit_msg>Add license to the packaging<commit_after>
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
license='BSD',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
Add license to the packagingtry:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
license='BSD',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
|
<commit_before>try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
<commit_msg>Add license to the packaging<commit_after>try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='fabtools',
version='0.1',
description='Tools for writing awesome Fabric files',
author='Ronan Amicel',
author_email='ronan.amicel@gmail.com',
url='http://github.com/ronnix/fabtools',
license='BSD',
install_requires=[
"fabric>=1.2.0",
],
setup_requires=[],
tests_require=[
"unittest2",
"mock",
],
packages=find_packages(exclude=['ez_setup', 'tests']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Software Distribution',
'Topic :: System :: Systems Administration',
],
)
|
619ca614890aa9d02acaf04fff51bee67233a8a8
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
install_requires.append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
Fix NameError on Python 2.6
|
Fix NameError on Python 2.6
|
Python
|
agpl-3.0
|
openhatch/oh-bugimporters,openhatch/oh-bugimporters,openhatch/oh-bugimporters
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
install_requires.append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
Fix NameError on Python 2.6
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
<commit_before>#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
install_requires.append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
<commit_msg>Fix NameError on Python 2.6<commit_after>
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
install_requires.append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
Fix NameError on Python 2.6#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
<commit_before>#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
install_requires.append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
<commit_msg>Fix NameError on Python 2.6<commit_after>#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.2',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
68682a04435447f37b4d867e20d819bcda1a0409
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.2",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.3",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
package_data={'portinus': ['templates/*']},
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
|
Fix package_data to include templates
|
Fix package_data to include templates
|
Python
|
mit
|
justin8/portinus,justin8/portinus
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.2",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
Fix package_data to include templates
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.3",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
package_data={'portinus': ['templates/*']},
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.2",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
<commit_msg>Fix package_data to include templates<commit_after>
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.3",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
package_data={'portinus': ['templates/*']},
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.2",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
Fix package_data to include templates#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.3",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
package_data={'portinus': ['templates/*']},
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.2",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
<commit_msg>Fix package_data to include templates<commit_after>#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name="portinus",
version="0.9.3",
author="Justin Dray",
author_email="justin@dray.be",
url="https://github.com/justin8/portinus",
description="This utility creates a systemd service file for a docker-compose file",
packages=find_packages(),
package_data={'portinus': ['templates/*']},
license="MIT",
install_requires=[
"click",
"docker",
],
tests_require=["nose",
"coverage",
"mock",
],
test_suite="nose.collector",
entry_points={
"console_scripts": [
"portinus=portinus.cli:task",
"portinus-monitor=portinus.monitor.cli:task",
]
},
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
],
)
|
b520fd7380a7a477009c3990d42b07355d9fbe42
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.4',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.41',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
Bump minor version to account for changes of MANIFEST
|
Bump minor version to account for changes of MANIFEST
|
Python
|
isc
|
jaj42/dyngraph,jaj42/GraPhysio,jaj42/GraPhysio
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.4',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
Bump minor version to account for changes of MANIFEST
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.41',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
<commit_before>from setuptools import setup
setup(name = 'graphysio',
version = '0.4',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
<commit_msg>Bump minor version to account for changes of MANIFEST<commit_after>
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.41',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
from setuptools import setup
setup(name = 'graphysio',
version = '0.4',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
Bump minor version to account for changes of MANIFESTfrom setuptools import setup
setup(name = 'graphysio',
version = '0.41',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
<commit_before>from setuptools import setup
setup(name = 'graphysio',
version = '0.4',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
<commit_msg>Bump minor version to account for changes of MANIFEST<commit_after>from setuptools import setup
setup(name = 'graphysio',
version = '0.41',
description = 'Graphical visualization of physiologic time series',
url = 'https://github.com/jaj42/graphysio',
author = 'Jona JOACHIM',
author_email = 'jona@joachim.cc',
license = 'ISC',
install_requires = ['pyqtgraph', 'pandas'],
scripts = ['scripts/graphysio.py'],
packages = ['graphysio', 'graphysio.ui'],
)
|
a18dc1abe7bd792eb2f45d821748fbcc922e5291
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/orderedset',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.2',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
|
Fix PyPI URL, bump version
|
Fix PyPI URL, bump version
|
Python
|
mit
|
Toilal/ordered-set,LuminosoInsight/ordered-set,xsuchy/ordered-set,hayd/ordered-set
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/orderedset',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
Fix PyPI URL, bump version
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.2',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
|
<commit_before>from setuptools import setup
setup(
name="ordered-set",
version = '1.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/orderedset',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
<commit_msg>Fix PyPI URL, bump version<commit_after>
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.2',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
|
from setuptools import setup
setup(
name="ordered-set",
version = '1.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/orderedset',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
Fix PyPI URL, bump versionfrom setuptools import setup
setup(
name="ordered-set",
version = '1.2',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
|
<commit_before>from setuptools import setup
setup(
name="ordered-set",
version = '1.1',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/orderedset',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
<commit_msg>Fix PyPI URL, bump version<commit_after>from setuptools import setup
setup(
name="ordered-set",
version = '1.2',
maintainer='Luminoso Technologies, Inc.',
maintainer_email='rob@luminoso.com',
license = "MIT-LICENSE",
url = 'http://github.com/LuminosoInsight/ordered-set',
platforms = ["any"],
description = "A MutableSet that remembers its order, so that every entry has an index.",
py_modules=['ordered_set'],
)
|
f09845167ecb9791ad678001eb940616e3531c3b
|
setup.py
|
setup.py
|
from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
|
from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'slackclient',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
|
Add Slackclient to the package requirements
|
Add Slackclient to the package requirements
|
Python
|
agpl-3.0
|
palfrey/mopidy-tachikoma,palfrey/mopidy-tachikoma
|
from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
Add Slackclient to the package requirements
|
from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'slackclient',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
|
<commit_before>from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
<commit_msg>Add Slackclient to the package requirements<commit_after>
|
from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'slackclient',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
|
from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
Add Slackclient to the package requirementsfrom __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'slackclient',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
|
<commit_before>from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
<commit_msg>Add Slackclient to the package requirements<commit_after>from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
with open(filename) as fh:
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", fh.read()))
return metadata['version']
setup(
name='Mopidy-Tachikoma',
version=get_version('mopidy_tachikoma/__init__.py'),
url='https://github.com/palfrey/mopidy-tachikoma',
license='GNU Affero General Public License, Version 3',
author='Tom Parker',
author_email='palfrey@tevp.net',
description='Mopidy extension for talking to Slack',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'slackclient',
'Mopidy >= 1.0',
'Pykka >= 1.1',
],
entry_points={
'mopidy.ext': [
'tachikoma = mopidy_tachikoma:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
|
03cedcf794537df02dd9494be22be6c36975db25
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
Update the PyPI version to 8.0.1.
|
Update the PyPI version to 8.0.1.
|
Python
|
mit
|
Doist/todoist-python
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 8.0.1.
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 8.0.1.<commit_after>
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 8.0.1.# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.0',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 8.0.1.<commit_after># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='8.0.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
f8167ecb091908445dac9ec01d56757fba735d36
|
setup.py
|
setup.py
|
from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.4',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
|
from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions/archive/1.0.0.tar.gz',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.5',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
|
Set required python ver to 3.5
|
Set required python ver to 3.5
|
Python
|
mit
|
redzej/graphene-permissions
|
from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.4',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
Set required python ver to 3.5
|
from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions/archive/1.0.0.tar.gz',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.5',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
|
<commit_before>from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.4',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
<commit_msg>Set required python ver to 3.5<commit_after>
|
from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions/archive/1.0.0.tar.gz',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.5',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
|
from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.4',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
Set required python ver to 3.5from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions/archive/1.0.0.tar.gz',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.5',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
|
<commit_before>from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.4',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
<commit_msg>Set required python ver to 3.5<commit_after>from os import path
from pip.download import PipSession
from pip.req import parse_requirements
from graphene_permissions import __version__
from setuptools import setup
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_requirements = parse_requirements('requirements/requirements.txt', session=PipSession())
setup(
name='graphene-permissions',
packages=('graphene_permissions',),
license='MIT',
version=__version__,
author='redzej',
description='Simple graphene-django permission system',
long_description=long_description,
url='https://github.com/redzej/graphene-permissions',
download_url='https://github.com/redzej/graphene-permissions/archive/1.0.0.tar.gz',
install_requires=[str(ir.req) for ir in install_requirements],
keywords='graphene django permissions permission system',
python_requires='>=3.5',
classifiers=(
'Development Status :: 5 - Production',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 2.0',
'Topic :: Internet :: WWW/HTTP',
'Intended Audience :: Developers',
),
)
|
2cccffe7cf739b7d34300eed85d48b2d2a92a619
|
setup.py
|
setup.py
|
import sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Cython",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
import sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
Fix trove classifier: Cython is not used
|
Fix trove classifier: Cython is not used
|
Python
|
mit
|
marcelm/xopen
|
import sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Cython",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
Fix trove classifier: Cython is not used
|
import sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
<commit_before>import sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Cython",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
<commit_msg>Fix trove classifier: Cython is not used<commit_after>
|
import sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
import sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Cython",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
Fix trove classifier: Cython is not usedimport sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
<commit_before>import sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Cython",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
<commit_msg>Fix trove classifier: Cython is not used<commit_after>import sys
from setuptools import setup
if sys.version_info < (2, 6):
sys.stdout.write("At least Python 2.6 is required.\n")
sys.exit(1)
setup(
name = 'xopen',
version = '0.1.0',
author = 'Marcel Martin',
author_email = 'mail@marcelm.net',
url = 'https://github.com/marcelm/xopen/',
description = 'Open compressed files transparently',
license = 'MIT',
py_modules = ['xopen'],
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
bd7035cbb762d93494e55db56e06d6dbccf3c7e1
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel, a trading game for the Apple IIe by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3+",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
#'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
#'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
|
from distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3 or later (AGPLv3+)",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console :: Curses',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Simulation',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
|
Correct classifiers from the pypi trove entries
|
Correct classifiers from the pypi trove entries
|
Python
|
agpl-3.0
|
abadger/stellarmagnate
|
from distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel, a trading game for the Apple IIe by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3+",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
#'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
#'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
Correct classifiers from the pypi trove entries
|
from distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3 or later (AGPLv3+)",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console :: Curses',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Simulation',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
|
<commit_before>from distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel, a trading game for the Apple IIe by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3+",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
#'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
#'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
<commit_msg>Correct classifiers from the pypi trove entries<commit_after>
|
from distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3 or later (AGPLv3+)",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console :: Curses',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Simulation',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
|
from distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel, a trading game for the Apple IIe by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3+",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
#'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
#'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
Correct classifiers from the pypi trove entriesfrom distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3 or later (AGPLv3+)",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console :: Curses',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Simulation',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
|
<commit_before>from distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel, a trading game for the Apple IIe by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3+",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
#'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
#'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
<commit_msg>Correct classifiers from the pypi trove entries<commit_after>from distutils.core import setup
setup(name="stellar-magnate",
version="0.1",
description="A space-themed commodity trading game",
long_description="""
Stellar Magnate is a space-themed trading game in the spirit of Planetary
Travel by Brian Winn.
""",
author="Toshio Kuratomi",
author_email="toshio@fedoraproject.org",
maintainer="Toshio Kuratomi",
maintainer_email="toshio@fedoraproject.org",
url="https://github.com/abadger/pubmarine",
license="GNU Affero General Public License v3 or later (AGPLv3+)",
keywords='game trading',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console :: Curses',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Simulation',
],
packages=['magnate', 'magnate.ui'],
scripts=['bin/magnate'],
install_requires=['pubmarine >= 0.3', 'urwid', 'straight.plugin'],
)
|
a0f97f420e091c61a61e931cf777721dc06f2534
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.7',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.8',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
|
Bump version 1.0.8 -- more car details
|
Bump version 1.0.8 -- more car details
|
Python
|
mit
|
joshuakarjala/dk-car-scraper
|
#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.7',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
Bump version 1.0.8 -- more car details
|
#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.8',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.7',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
<commit_msg>Bump version 1.0.8 -- more car details<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.8',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.7',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
Bump version 1.0.8 -- more car details#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.8',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.7',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
<commit_msg>Bump version 1.0.8 -- more car details<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
DESCRIPTION = "A python module which returns details about a Danish car from its license plate number"
LONG_DESCRIPTION = open('README.rst').read()
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
INSTALL_REQUIRES = ['requests', 'beautifulsoup4']
try:
import importlib
except ImportError:
INSTALL_REQUIRES.append('importlib')
tests_require = [
'requests>=1.2',
'beautifulsoup4'
]
setup(
name='dk-car-scraper',
version='1.0.8',
packages=find_packages(exclude=[]),
author='Joshua Karjala-Svenden',
author_email='joshua@fluxuries.com',
url='https://github.com/joshuakarjala/dk-car-scraper/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
# tests_require=tests_require,
# extras_require={'test': tests_require},
# test_suite='runtests.runtests',
#include_package_data=True,
)
|
f2fa354eab1ece56052b2c4bc6adaf0c10803f6d
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http//github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
entry_points={
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
)
|
from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http://github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
keywords = ['debugging', 'profiling'],
entry_points = {
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
],
)
|
Prepare for submission to PyPI.
|
Prepare for submission to PyPI.
|
Python
|
bsd-2-clause
|
nvdv/vprof,nvdv/vprof,nvdv/vprof
|
from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http//github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
entry_points={
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
)
Prepare for submission to PyPI.
|
from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http://github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
keywords = ['debugging', 'profiling'],
entry_points = {
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
],
)
|
<commit_before>from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http//github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
entry_points={
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
)
<commit_msg>Prepare for submission to PyPI.<commit_after>
|
from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http://github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
keywords = ['debugging', 'profiling'],
entry_points = {
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
],
)
|
from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http//github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
entry_points={
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
)
Prepare for submission to PyPI.from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http://github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
keywords = ['debugging', 'profiling'],
entry_points = {
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
],
)
|
<commit_before>from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http//github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
entry_points={
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
)
<commit_msg>Prepare for submission to PyPI.<commit_after>from setuptools import setup
setup(
name='vprof',
version='0.1',
packages=['vprof'],
description="Visual profiler for Python",
url='http://github.com/nvdv/vprof',
license='BSD',
author='nvdv',
include_package_data=True,
keywords = ['debugging', 'profiling'],
entry_points = {
'console_scripts': [
'vprof = vprof.__main__:main'
]
},
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
],
)
|
cbf7000e91425ed555281a71d0edd272eed0eaad
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
|
from setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
long_description=open('README.rst').read(),
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
|
Include README.rst as description for PyPi
|
Include README.rst as description for PyPi
|
Python
|
mit
|
Thor77/TeamspeakStats,Thor77/TeamspeakStats
|
from setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
Include README.rst as description for PyPi
|
from setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
long_description=open('README.rst').read(),
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
|
<commit_before>from setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
<commit_msg>Include README.rst as description for PyPi<commit_after>
|
from setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
long_description=open('README.rst').read(),
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
|
from setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
Include README.rst as description for PyPifrom setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
long_description=open('README.rst').read(),
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
|
<commit_before>from setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
<commit_msg>Include README.rst as description for PyPi<commit_after>from setuptools import setup
setup(
name='tsstats',
version='1.4.1',
author='Thor77',
author_email='thor77@thor77.org',
description='A simple Teamspeak stats-generator',
long_description=open('README.rst').read(),
keywords='ts3 teamspeak teamspeak3 tsstats teamspeakstats',
url='https://github.com/Thor77/TeamspeakStats',
packages=['tsstats'],
entry_points={
'console_scripts': [
'tsstats = tsstats.__main__:cli'
]
},
package_data={
'tsstats': ['templates/*.jinja2']
},
install_requires=[
'Jinja2>=2.8'
],
)
|
60bcd21c00443d947d76b268ed052b8787741f5b
|
setup.py
|
setup.py
|
from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils', 'Markdown']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Add the dependency to Mardown.
|
Add the dependency to Mardown.
|
Python
|
agpl-3.0
|
rbarraud/pelican,eevee/pelican,lucasplus/pelican,ehashman/pelican,florianjacob/pelican,garbas/pelican,Scheirle/pelican,levanhien8/pelican,HyperGroups/pelican,TC01/pelican,crmackay/pelican,alexras/pelican,UdeskDeveloper/pelican,janaurka/git-debug-presentiation,jimperio/pelican,jvehent/pelican,levanhien8/pelican,liyonghelpme/myBlog,deved69/pelican-1,rbarraud/pelican,lucasplus/pelican,getpelican/pelican,goerz/pelican,number5/pelican,lucasplus/pelican,jvehent/pelican,btnpushnmunky/pelican,koobs/pelican,alexras/pelican,treyhunner/pelican,farseerfc/pelican,kernc/pelican,iurisilvio/pelican,zackw/pelican,rbarraud/pelican,goerz/pelican,Scheirle/pelican,Rogdham/pelican,abrahamvarricatt/pelican,GiovanniMoretti/pelican,ionelmc/pelican,ls2uper/pelican,HyperGroups/pelican,Polyconseil/pelican,JeremyMorgan/pelican,Polyconseil/pelican,iKevinY/pelican,UdeskDeveloper/pelican,liyonghelpme/myBlog,kennethlyn/pelican,crmackay/pelican,talha131/pelican,sunzhongwei/pelican,douglaskastle/pelican,koobs/pelican,eevee/pelican,florianjacob/pelican,levanhien8/pelican,11craft/pelican,karlcow/pelican,avaris/pelican,garbas/pelican,liyonghelpme/myBlog,Rogdham/pelican,sunzhongwei/pelican,deved69/pelican-1,51itclub/pelican,ehashman/pelican,catdog2/pelican,avaris/pelican,Summonee/pelican,51itclub/pelican,kernc/pelican,talha131/pelican,treyhunner/pelican,ingwinlu/pelican,justinmayer/pelican,Summonee/pelican,UdeskDeveloper/pelican,getpelican/pelican,kennethlyn/pelican,deved69/pelican-1,simonjj/pelican,HyperGroups/pelican,iurisilvio/pelican,treyhunner/pelican,simonjj/pelican,karlcow/pelican,arty-name/pelican,gymglish/pelican,11craft/pelican,fbs/pelican,GiovanniMoretti/pelican,joetboole/pelican,joetboole/pelican,TC01/pelican,ls2uper/pelican,eevee/pelican,TC01/pelican,joetboole/pelican,jo-tham/pelican,liyonghelpme/myBlog,sunzhongwei/pelican,JeremyMorgan/pelican,Rogdham/pelican,deanishe/pelican,koobs/pelican,GiovanniMoretti/pelican,iKevinY/pelican,btnpushnmunky/pelican,iurisilvio/pelican,abrahamvarricatt/pelic
an,btnpushnmunky/pelican,zackw/pelican,deanishe/pelican,farseerfc/pelican,Summonee/pelican,florianjacob/pelican,douglaskastle/pelican,ls2uper/pelican,zackw/pelican,karlcow/pelican,jo-tham/pelican,0xMF/pelican,JeremyMorgan/pelican,number5/pelican,crmackay/pelican,alexras/pelican,kennethlyn/pelican,liyonghelpme/myBlog,deanishe/pelican,catdog2/pelican,sunzhongwei/pelican,gymglish/pelican,ingwinlu/pelican,kernc/pelican,abrahamvarricatt/pelican,simonjj/pelican,number5/pelican,Scheirle/pelican,11craft/pelican,catdog2/pelican,gymglish/pelican,ehashman/pelican,janaurka/git-debug-presentiation,lazycoder-ru/pelican,51itclub/pelican,jimperio/pelican,goerz/pelican,jimperio/pelican,douglaskastle/pelican,Natim/pelican,garbas/pelican,lazycoder-ru/pelican,janaurka/git-debug-presentiation,lazycoder-ru/pelican,jvehent/pelican
|
from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Add the dependency to Mardown.
|
from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils', 'Markdown']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Add the dependency to Mardown.<commit_after>
|
from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils', 'Markdown']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Add the dependency to Mardown.from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils', 'Markdown']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Add the dependency to Mardown.<commit_after>from setuptools import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments', 'docutils', 'Markdown']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '2.3',
url = 'http://alexis.notmyidea.org/pelican/',
author = 'Alexis Metaireau',
author_email = 'alexis@notmyidea.org',
description = "A tool to generate a static blog, with restructured text (or markdown) input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
include_package_data = True,
install_requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
688bf70a58ab15d7b2c3da3883660facb2ba9079
|
setup.py
|
setup.py
|
import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-brightcove',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'search': ['django-haystack', ]
}
)
|
import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'brightcove': ['django-brightcove'],
'search': ['django-haystack', ]
}
)
|
Move `django-brightcove` dependency into `brightcove` optional extra.
|
Move `django-brightcove` dependency into `brightcove` optional extra.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-brightcove',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'search': ['django-haystack', ]
}
)
Move `django-brightcove` dependency into `brightcove` optional extra.
|
import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'brightcove': ['django-brightcove'],
'search': ['django-haystack', ]
}
)
|
<commit_before>import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-brightcove',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'search': ['django-haystack', ]
}
)
<commit_msg>Move `django-brightcove` dependency into `brightcove` optional extra.<commit_after>
|
import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'brightcove': ['django-brightcove'],
'search': ['django-haystack', ]
}
)
|
import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-brightcove',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'search': ['django-haystack', ]
}
)
Move `django-brightcove` dependency into `brightcove` optional extra.import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'brightcove': ['django-brightcove'],
'search': ['django-haystack', ]
}
)
|
<commit_before>import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-brightcove',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'search': ['django-haystack', ]
}
)
<commit_msg>Move `django-brightcove` dependency into `brightcove` optional extra.<commit_after>import setuptools
from icekit import __version__
setuptools.setup(
name='icekit',
version=__version__,
packages=setuptools.find_packages(),
install_requires=[
'coverage',
'django-bootstrap3',
'django-dynamic-fixture',
'django-fluent-pages[flatpage,fluentpage,redirectnode]',
'django-fluent-contents',
'django-nose',
'django-webtest',
'mkdocs',
'nose-progressive',
'Pillow',
'tox',
'WebTest',
],
extras_require={
'brightcove': ['django-brightcove'],
'search': ['django-haystack', ]
}
)
|
9fd4708f1c1e6f2f85b2bb1b5cd744d09ebe2886
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.0'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/171d75f1708b2297971a4df89217cd348f2b47b6.zip#egg=pyhdb-0.3.0'
],
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.1-mod'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/f3cb802a9da783238c937c0175a36c771f77a9d3.zip#egg=pyhdb-0.3.1-mod'
],
)
|
Change to custom pyhdb version number.
|
Change to custom pyhdb version number.
|
Python
|
bsd-3-clause
|
cmfcmf/django_hana_pyhdb,mathebox/django_hana_pyhdb
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.0'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/171d75f1708b2297971a4df89217cd348f2b47b6.zip#egg=pyhdb-0.3.0'
],
)
Change to custom pyhdb version number.
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.1-mod'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/f3cb802a9da783238c937c0175a36c771f77a9d3.zip#egg=pyhdb-0.3.1-mod'
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.0'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/171d75f1708b2297971a4df89217cd348f2b47b6.zip#egg=pyhdb-0.3.0'
],
)
<commit_msg>Change to custom pyhdb version number.<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.1-mod'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/f3cb802a9da783238c937c0175a36c771f77a9d3.zip#egg=pyhdb-0.3.1-mod'
],
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.0'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/171d75f1708b2297971a4df89217cd348f2b47b6.zip#egg=pyhdb-0.3.0'
],
)
Change to custom pyhdb version number.#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.1-mod'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/f3cb802a9da783238c937c0175a36c771f77a9d3.zip#egg=pyhdb-0.3.1-mod'
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.0'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/171d75f1708b2297971a4df89217cd348f2b47b6.zip#egg=pyhdb-0.3.0'
],
)
<commit_msg>Change to custom pyhdb version number.<commit_after>#!/usr/bin/env python
from setuptools import setup
setup(
name='django_hana',
version='1.1',
description='SAP HANA backend for Django 1.7',
author='Max Bothe, Kapil Ratnani',
author_email='mathebox@gmail.com, kapil.ratnani@iiitb.net',
url='https://github.com/mathebox/django_hana',
packages=['django_hana'],
install_requires = [
'pyhdb==0.3.1-mod'
],
dependency_links = [
'https://github.com/mathebox/PyHDB/archive/f3cb802a9da783238c937c0175a36c771f77a9d3.zip#egg=pyhdb-0.3.1-mod'
],
)
|
db908daf4cade04c2698a4aa14bbb991c81f2f65
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
scripts = [os.path.join('bin', filename) for filename in os.listdir('bin')]
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=scripts,
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
|
import os
from setuptools import setup, find_packages
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=[],
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
|
Remove the scripts listing as it causes error if bin/ is empty
|
Remove the scripts listing as it causes error if bin/ is empty
|
Python
|
mit
|
heryandi/mnp
|
import os
from setuptools import setup, find_packages
scripts = [os.path.join('bin', filename) for filename in os.listdir('bin')]
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=scripts,
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
Remove the scripts listing as it causes error if bin/ is empty
|
import os
from setuptools import setup, find_packages
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=[],
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
|
<commit_before>import os
from setuptools import setup, find_packages
scripts = [os.path.join('bin', filename) for filename in os.listdir('bin')]
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=scripts,
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
<commit_msg>Remove the scripts listing as it causes error if bin/ is empty<commit_after>
|
import os
from setuptools import setup, find_packages
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=[],
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
|
import os
from setuptools import setup, find_packages
scripts = [os.path.join('bin', filename) for filename in os.listdir('bin')]
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=scripts,
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
Remove the scripts listing as it causes error if bin/ is emptyimport os
from setuptools import setup, find_packages
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=[],
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
|
<commit_before>import os
from setuptools import setup, find_packages
scripts = [os.path.join('bin', filename) for filename in os.listdir('bin')]
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=scripts,
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
<commit_msg>Remove the scripts listing as it causes error if bin/ is empty<commit_after>import os
from setuptools import setup, find_packages
setup(
name='mnp',
version='1.0.0',
author='Heryandi',
author_email='heryandi@gmail.com',
packages=find_packages(exclude='test'),
scripts=[],
url='https://github.com/heryandi/mnp',
license='MIT',
description='Wrapper tools to manage Mininet package',
long_description=open('README.txt').read(),
install_requires=[
"pip",
"distribute",
],
entry_points={'console_scripts': [
'mnp = mnp:main',
]}
)
|
750d835bb6185115c436ce24a347436875caf983
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
'boto',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
|
Add the boto as requirement
|
Add the boto as requirement
|
Python
|
mit
|
ant30/s3authbasic
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
Add the boto as requirement
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
'boto',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
|
<commit_before>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
<commit_msg>Add the boto as requirement<commit_after>
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
'boto',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
Add the boto as requirementimport os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
'boto',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
|
<commit_before>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
<commit_msg>Add the boto as requirement<commit_after>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt')) as f:
README = f.read()
with open(os.path.join(here, 'CHANGES.txt')) as f:
CHANGES = f.read()
requires = [
'pyramid',
'pyramid_chameleon',
'pyramid_debugtoolbar',
'waitress',
'boto',
]
setup(name='s3authbasic',
version='0.0',
description='s3authbasic',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='Antonio Perez-Aranda Alcaide',
author_email='ant30tx@gmail.com',
url='http://www.ant30.es/',
keywords='web pyramid pylons s3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="s3authbasic",
entry_points="""\
[paste.app_factory]
main = s3authbasic:main
""",
)
|
15ec0fab2fbe16fd84791c9d0a5b9006971b76bf
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
# download_url='https://github.com/bmi-forum/bmi-python/tarball/0.1.0',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(),
)
|
#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
packages=find_packages(),
)
|
Update classifiers to show Python 3 support
|
Update classifiers to show Python 3 support
|
Python
|
mit
|
bmi-forum/bmi-python,bmi-forum/bmi-python
|
#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
# download_url='https://github.com/bmi-forum/bmi-python/tarball/0.1.0',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(),
)
Update classifiers to show Python 3 support
|
#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
packages=find_packages(),
)
|
<commit_before>#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
# download_url='https://github.com/bmi-forum/bmi-python/tarball/0.1.0',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(),
)
<commit_msg>Update classifiers to show Python 3 support<commit_after>
|
#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
packages=find_packages(),
)
|
#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
# download_url='https://github.com/bmi-forum/bmi-python/tarball/0.1.0',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(),
)
Update classifiers to show Python 3 support#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
packages=find_packages(),
)
|
<commit_before>#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
# download_url='https://github.com/bmi-forum/bmi-python/tarball/0.1.0',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(),
)
<commit_msg>Update classifiers to show Python 3 support<commit_after>#! /usr/bin/env python
from setuptools import setup, find_packages
from basic_modeling_interface import __version__
setup(name='basic-modeling-interface',
version=__version__,
author='Eric Hutton',
author_email='eric.hutton@colorado.edu',
url='https://github.com/bmi-forum/bmi-python',
license='MIT',
description='Python bindings for the Basic Modeling Interface',
long_description=open('README.md').read(),
keywords='BMI model coupling',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
packages=find_packages(),
)
|
f521ec4baef6d5e78def56aa42e273f00601a3db
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.5.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.2.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
|
Switch extra-views to lower version for now
|
Switch extra-views to lower version for now
|
Python
|
bsd-3-clause
|
django-oscar/django-oscar-stores,django-oscar/django-oscar-stores,django-oscar/django-oscar-stores
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.5.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
Switch extra-views to lower version for now
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.2.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.5.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
<commit_msg>Switch extra-views to lower version for now<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.2.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.5.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
Switch extra-views to lower version for now#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.2.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.5.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
<commit_msg>Switch extra-views to lower version for now<commit_after>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
setup(
name='django-oscar-stores',
version=":versiontools:stores:",
url='https://github.com/tangentlabs/django-oscar-stores',
author="Sebastian Vetter",
author_email="sebastian.vetter@tangentsnowball.com.au",
description="An extension for Oscar to include store locations",
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
keywords="django, oscar, e-commerce",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django>=1.4.2',
'versiontools>=1.1.9',
'django-oscar>=0.4',
'django-extra-views>=0.2.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python'
]
)
|
aabb37522f93afd88fa30dbababbfaf6f357c485
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
|
from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
|
Set development status to stable
|
Set development status to stable
Thanks Uri Rodberg for the suggestion.
|
Python
|
bsd-3-clause
|
jazzband/sorl-thumbnail,jazzband/sorl-thumbnail,jazzband/sorl-thumbnail
|
from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
Set development status to stable
Thanks Uri Rodberg for the suggestion.
|
from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
<commit_msg>Set development status to stable
Thanks Uri Rodberg for the suggestion.<commit_after>
|
from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
|
from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
Set development status to stable
Thanks Uri Rodberg for the suggestion.from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
<commit_msg>Set development status to stable
Thanks Uri Rodberg for the suggestion.<commit_after>from setuptools import setup, find_packages
setup(
name='sorl-thumbnail',
use_scm_version=True,
description='Thumbnails for Django',
long_description=open('README.rst').read(),
author="Mikko Hellsing",
author_email='mikko@aino.se',
maintainer="Jazzband",
maintainer_email="roadies@jazzband.co",
license="BSD",
url='https://github.com/jazzband/sorl-thumbnail',
packages=find_packages(exclude=['tests', 'tests.*']),
platforms='any',
python_requires='>=3.6',
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Multimedia :: Graphics',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.1',
'Framework :: Django :: 3.2',
'Framework :: Django :: 4.0',
'Framework :: Django :: 4.1',
],
setup_requires=['setuptools_scm'],
)
|
59234b3f99c7093649090036a0eda28a4314f4af
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
|
from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
include_package_data=True,
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
|
Include package data for wheels
|
Include package data for wheels
|
Python
|
mit
|
cwacek/cobertura-clover-transform
|
from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
Include package data for wheels
|
from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
include_package_data=True,
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
<commit_msg>Include package data for wheels<commit_after>
|
from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
include_package_data=True,
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
|
from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
Include package data for wheelsfrom setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
include_package_data=True,
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
<commit_msg>Include package data for wheels<commit_after>from setuptools import setup, find_packages
setup(
name="cobertura-clover-transform",
version='1.1b1',
packages=find_packages(),
include_package_data=True,
description="Tools for transforming Cobertura test "
"coverage XML into Clover-style XML",
author='Chris Wacek',
author_email='cwacek@gmail.com',
url='http://github.com/cwacek/cobertura-clover-transform',
license='MIT',
keywords='cobertura coverage test clover xml'
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.