commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
392ee6bec1041730b9859e70b9abe9b28a012d45 | libs/__init__.py | libs/__init__.py | # Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
"""
import wlgen
import utils
| # Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
.. automodule:: wlgen
:members:
:show-inheritance:
"""
import wlgen
import utils
| Add automodule sphinx directive for wlgen package | libs: Add automodule sphinx directive for wlgen package
| Python | apache-2.0 | credp/lisa,credp/lisa,ARM-software/lisa,credp/lisa,credp/lisa,ARM-software/lisa,ARM-software/lisa,ARM-software/lisa | # Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
"""
import wlgen
import utils
libs: Add automodule sphinx directive for wlgen package | # Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
.. automodule:: wlgen
:members:
:show-inheritance:
"""
import wlgen
import utils
| <commit_before># Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
"""
import wlgen
import utils
<commit_msg>libs: Add automodule sphinx directive for wlgen package<commit_after> | # Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
.. automodule:: wlgen
:members:
:show-inheritance:
"""
import wlgen
import utils
| # Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
"""
import wlgen
import utils
libs: Add automodule sphinx directive for wlgen package# Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
.. automodule:: wlgen
:members:
:show-inheritance:
"""
import wlgen
import utils
| <commit_before># Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
"""
import wlgen
import utils
<commit_msg>libs: Add automodule sphinx directive for wlgen package<commit_after># Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
.. automodule:: libs.utils
:members:
:show-inheritance:
.. automodule:: wlgen
:members:
:show-inheritance:
"""
import wlgen
import utils
|
36c97aea9d3ea143f6a494c5f436ad7c0392cd6a | jsonsempai.py | jsonsempai.py | import sys
class SempaiLoader(object):
def __init__(self, *args):
print args
def find_module(self, fullname, path=None):
print 'finding', fullname, path
if fullname == 'simple':
return self
return None
sys.path_hooks.append(SempaiLoader)
sys.path.insert(0, 'simple')
| import os
import sys
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(json_path):
print json_path
return self
return None
def load_module(self, name):
raise ImportError('Hey, yo fool')
sys.meta_path.append(SempaiLoader())
| Add the import hook to sys.meta_path | Add the import hook to sys.meta_path
| Python | mit | kragniz/json-sempai | import sys
class SempaiLoader(object):
def __init__(self, *args):
print args
def find_module(self, fullname, path=None):
print 'finding', fullname, path
if fullname == 'simple':
return self
return None
sys.path_hooks.append(SempaiLoader)
sys.path.insert(0, 'simple')
Add the import hook to sys.meta_path | import os
import sys
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(json_path):
print json_path
return self
return None
def load_module(self, name):
raise ImportError('Hey, yo fool')
sys.meta_path.append(SempaiLoader())
| <commit_before>import sys
class SempaiLoader(object):
def __init__(self, *args):
print args
def find_module(self, fullname, path=None):
print 'finding', fullname, path
if fullname == 'simple':
return self
return None
sys.path_hooks.append(SempaiLoader)
sys.path.insert(0, 'simple')
<commit_msg>Add the import hook to sys.meta_path<commit_after> | import os
import sys
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(json_path):
print json_path
return self
return None
def load_module(self, name):
raise ImportError('Hey, yo fool')
sys.meta_path.append(SempaiLoader())
| import sys
class SempaiLoader(object):
def __init__(self, *args):
print args
def find_module(self, fullname, path=None):
print 'finding', fullname, path
if fullname == 'simple':
return self
return None
sys.path_hooks.append(SempaiLoader)
sys.path.insert(0, 'simple')
Add the import hook to sys.meta_pathimport os
import sys
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(json_path):
print json_path
return self
return None
def load_module(self, name):
raise ImportError('Hey, yo fool')
sys.meta_path.append(SempaiLoader())
| <commit_before>import sys
class SempaiLoader(object):
def __init__(self, *args):
print args
def find_module(self, fullname, path=None):
print 'finding', fullname, path
if fullname == 'simple':
return self
return None
sys.path_hooks.append(SempaiLoader)
sys.path.insert(0, 'simple')
<commit_msg>Add the import hook to sys.meta_path<commit_after>import os
import sys
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(json_path):
print json_path
return self
return None
def load_module(self, name):
raise ImportError('Hey, yo fool')
sys.meta_path.append(SempaiLoader())
|
2c2fef3b8cf52219ee6bb2196fe5d3c9f9ae4443 | tacyt_sdk/api_requests/tag_request.py | tacyt_sdk/api_requests/tag_request.py | """
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower()
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
| """
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower() if tag else None
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
| Fix normalization of tags in case is None | Fix normalization of tags in case is None
| Python | lgpl-2.1 | ElevenPaths/tacyt-sdk-python | """
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower()
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
Fix normalization of tags in case is None | """
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower() if tag else None
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
| <commit_before>"""
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower()
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
<commit_msg>Fix normalization of tags in case is None<commit_after> | """
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower() if tag else None
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
| """
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower()
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
Fix normalization of tags in case is None"""
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower() if tag else None
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
| <commit_before>"""
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower()
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
<commit_msg>Fix normalization of tags in case is None<commit_after>"""
This library offers an API to use Tacyt in a python environment.
Copyright (C) 2015-2020 Eleven Paths
"""
try:
import simplejson as json
except ImportError:
import json
class TagRequest(object):
LIST_REQUEST = "LIST"
CREATE_REQUEST = "CREATE"
REMOVE_REQUEST = "REMOVE"
REMOVE_ALL_REQUEST = "REMOVE_ALL"
def __init__(self, request_type=None, tag=None, apps=None):
self.request_type = request_type
self.tag = tag.strip().lower() if tag else None
self.apps = apps if apps else []
def as_dict(self):
json_obj = dict()
if self.request_type:
json_obj["requestType"] = self.request_type
if self.tag:
json_obj["tag"] = self.tag
if self.apps:
json_obj["apps"] = self.apps
return json_obj
def get_json_encode_string(self):
return json.dumps(self.as_dict())
|
522279e967a8864e4404c8d05536b3d418da521f | cellcounter/urls.py | cellcounter/urls.py | from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view(), name="my_counts"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| Use 'my_counts' as name for MyCountsListView | Use 'my_counts' as name for MyCountsListView
| Python | mit | cellcounter/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcountr,haematologic/cellcounter,haematologic/cellcounter,cellcounter/cellcounter,oghm2/hackdayoxford,haematologic/cellcounter,oghm2/hackdayoxford,haematologic/cellcountr | from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
Use 'my_counts' as name for MyCountsListView | from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view(), name="my_counts"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| <commit_before>from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Use 'my_counts' as name for MyCountsListView<commit_after> | from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view(), name="my_counts"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
Use 'my_counts' as name for MyCountsListViewfrom django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view(), name="my_counts"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| <commit_before>from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view()),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Use 'my_counts' as name for MyCountsListView<commit_after>from django.conf.urls import patterns, include, url
from django.views.generic.simple import direct_to_template
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.contrib.auth.views import login, logout
from cellcounter.main.views import new_count, view_count, edit_count, ListMyCountsView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', direct_to_template, {'template': 'main/index.html'},
name="index"),
url(r'^count/$', new_count, name="count_home"),
url(r'^count/new/$', new_count, name="new_count"),
url(r'^count/(?P<count_id>\d+)/$', view_count, name="view_count"),
url(r'^count/(?P<count_id>\d+)/edit/$', edit_count, name="edit_count"),
url(r'^user/$', ListMyCountsView.as_view(), name="my_counts"),
url(r'^login/$', login, {'template_name': 'main/login.html'}, name='login'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
|
2e6df2a332217d0e3da225075807360fe230b600 | tools/po2js.py | tools/po2js.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "?";
%s""" % (unicode(os.path.basename(path)), unicode(time.asctime()), u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "%s";
%s""" % (unicode(os.path.basename(path)),
unicode(time.asctime()),
unicode(os.path.splitext(os.path.basename(path))[0]),
u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
| Add the language code to the translated file | Add the language code to the translated file
| Python | apache-2.0 | runeh/dragonfly-stp-1,runeh/dragonfly-stp-1,runeh/dragonfly-stp-1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "?";
%s""" % (unicode(os.path.basename(path)), unicode(time.asctime()), u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
Add the language code to the translated file | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "%s";
%s""" % (unicode(os.path.basename(path)),
unicode(time.asctime()),
unicode(os.path.splitext(os.path.basename(path))[0]),
u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
| <commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "?";
%s""" % (unicode(os.path.basename(path)), unicode(time.asctime()), u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Add the language code to the translated file<commit_after> | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "%s";
%s""" % (unicode(os.path.basename(path)),
unicode(time.asctime()),
unicode(os.path.splitext(os.path.basename(path))[0]),
u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "?";
%s""" % (unicode(os.path.basename(path)), unicode(time.asctime()), u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
Add the language code to the translated file#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "%s";
%s""" % (unicode(os.path.basename(path)),
unicode(time.asctime()),
unicode(os.path.splitext(os.path.basename(path))[0]),
u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
| <commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "?";
%s""" % (unicode(os.path.basename(path)), unicode(time.asctime()), u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Add the language code to the translated file<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os.path
import codecs
import dfstrings
import time
def make_js_from_po(path):
strings = []
for po in [p for p in dfstrings.get_po_strings(path) if "scope" in p and "dragonfly" in p["scope"] ]:
strings.append(u"""ui_strings.%s="%s";""" % (po["jsname"], po["msgstr"]))
return """/* Generated from %s at %s */
window.ui_strings || ( window.ui_strings = {} )
window.ui_strings.lang_code = "%s";
%s""" % (unicode(os.path.basename(path)),
unicode(time.asctime()),
unicode(os.path.splitext(os.path.basename(path))[0]),
u"\n".join(strings))
def main():
if len(sys.argv)==1:
print "Usage: po2js.py infile [outfile]. If no outfile, write to stdout"
return(1)
else:
infile = sys.argv[1]
if len(sys.argv)==3:
outfile = codecs.open(sys.argv[2], "w", encoding="utf_8_sig")
else:
outfile = sys.stdout
data = make_js_from_po(infile)
outfile.write(data)
return 0
if __name__ == "__main__":
sys.exit(main())
|
bc20e8d01dc154d45f9dfc8f2b610d415a40f253 | broadbean/__init__.py | broadbean/__init__.py | # flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .segment import Segment
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
| # flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
| Remove import of version 1.0 feature | Remove import of version 1.0 feature
| Python | mit | WilliamHPNielsen/broadbean | # flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .segment import Segment
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
Remove import of version 1.0 feature | # flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
| <commit_before># flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .segment import Segment
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
<commit_msg>Remove import of version 1.0 feature<commit_after> | # flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
| # flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .segment import Segment
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
Remove import of version 1.0 feature# flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
| <commit_before># flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .segment import Segment
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
<commit_msg>Remove import of version 1.0 feature<commit_after># flake8: noqa (ignore unused imports)
# Version 1.0
from . import ripasso
from .element import Element
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
|
52d7a26237cb1e594456127b775524596c3fb1ac | tests/test_barebones.py | tests/test_barebones.py | # -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert site.project.EXCLUDES == EXCLUDES
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
| # -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert set(site.project.EXCLUDES) == set(EXCLUDES)
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
| Test exclude equality with sets | Test exclude equality with sets
| Python | bsd-3-clause | tarbell-project/tarbell,eyeseast/tarbell,eyeseast/tarbell,tarbell-project/tarbell | # -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert site.project.EXCLUDES == EXCLUDES
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
Test exclude equality with sets | # -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert set(site.project.EXCLUDES) == set(EXCLUDES)
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
| <commit_before># -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert site.project.EXCLUDES == EXCLUDES
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
<commit_msg>Test exclude equality with sets<commit_after> | # -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert set(site.project.EXCLUDES) == set(EXCLUDES)
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
| # -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert site.project.EXCLUDES == EXCLUDES
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
Test exclude equality with sets# -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert set(site.project.EXCLUDES) == set(EXCLUDES)
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
| <commit_before># -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert site.project.EXCLUDES == EXCLUDES
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
<commit_msg>Test exclude equality with sets<commit_after># -*- coding: utf-8 -*-
"""
Tests for the barebones example project
"""
import os
import py.path
from tarbell.app import EXCLUDES, TarbellSite
PATH = os.path.realpath('examples/barebones')
def test_get_site():
site = TarbellSite(PATH)
assert os.path.realpath(site.path) == os.path.realpath(PATH)
assert site.project.name == "barebones"
def test_default_excludes():
"Ensure a basic set of excluded files"
site = TarbellSite(PATH)
assert set(site.project.EXCLUDES) == set(EXCLUDES)
def test_generate_site(tmpdir):
"Generate a static site matching what's in _site"
site = TarbellSite(PATH)
built = os.path.join(PATH, '_site')
site.generate_static_site(str(tmpdir))
files = set(f.basename for f in tmpdir.listdir())
assert files == set(['data.json', 'index.html'])
|
0f22d72aeb4fc872dfa1c5e75d40102c27cf2a8c | tabtranslator/model.py | tabtranslator/model.py | class sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
def add_bar(bar):
self.bars.append(bar)
class stave(sheet):
""" stave: sheet that is displayed in music theory representation"""
def __init__(self):
super(stave, self).__init__()
class tab(sheet):
""" tab: sheet that is displayed with tab representation"""
def __init__(self):
super(tab, self).__init__()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet
"""
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration | class sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet """
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration
| CLEAN simplify useless dep and methods | CLEAN simplify useless dep and methods
| Python | mit | ograndedjogo/tab-translator,ograndedjogo/tab-translator | class sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
def add_bar(bar):
self.bars.append(bar)
class stave(sheet):
""" stave: sheet that is displayed in music theory representation"""
def __init__(self):
super(stave, self).__init__()
class tab(sheet):
""" tab: sheet that is displayed with tab representation"""
def __init__(self):
super(tab, self).__init__()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet
"""
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = durationCLEAN simplify useless dep and methods | class sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet """
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration
| <commit_before>class sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
def add_bar(bar):
self.bars.append(bar)
class stave(sheet):
""" stave: sheet that is displayed in music theory representation"""
def __init__(self):
super(stave, self).__init__()
class tab(sheet):
""" tab: sheet that is displayed with tab representation"""
def __init__(self):
super(tab, self).__init__()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet
"""
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration<commit_msg>CLEAN simplify useless dep and methods<commit_after> | class sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet """
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration
| class sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
def add_bar(bar):
self.bars.append(bar)
class stave(sheet):
""" stave: sheet that is displayed in music theory representation"""
def __init__(self):
super(stave, self).__init__()
class tab(sheet):
""" tab: sheet that is displayed with tab representation"""
def __init__(self):
super(tab, self).__init__()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet
"""
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = durationCLEAN simplify useless dep and methodsclass sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet """
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration
| <commit_before>class sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
def add_bar(bar):
self.bars.append(bar)
class stave(sheet):
""" stave: sheet that is displayed in music theory representation"""
def __init__(self):
super(stave, self).__init__()
class tab(sheet):
""" tab: sheet that is displayed with tab representation"""
def __init__(self):
super(tab, self).__init__()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet
"""
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration<commit_msg>CLEAN simplify useless dep and methods<commit_after>class sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
class bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, cycle=4):
super(bar, self).__init__()
self.cycle = cycle
self.notes = dict()
def add_note(self, note, start_time):
"""
note : note : note instance
start_time : int : start time inside the measure
"""
self.notes[note] = start_time
class note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet """
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration
|
ee5bd327bb3070277c87a96f72ca7e019c92f777 | publisher/build_paper.py | publisher/build_paper.py | #!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = '\n'.join([r'% PDF Standard Fonts',
r'\usepackage{mathptmx}',
r'\usepackage[scaled=.80]{helvet}',
r'\usepackage{courier}'],)
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
| #!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = r'''
% These preamble commands are from build_paper.py
% PDF Standard Fonts
\usepackage{mathptmx}
\usepackage[scaled=.90]{helvet}
\usepackage{courier}
% Make verbatim environment smaller
\makeatletter
\g@addto@macro\@verbatim\footnotesize
\makeatother
\renewcommand{\quote}{}
'''
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
| Update preamble: no indent on quote, smaller verbatim font size. | Update preamble: no indent on quote, smaller verbatim font size.
| Python | bsd-2-clause | dotsdl/scipy_proceedings,sbenthall/scipy_proceedings,juhasch/euroscipy_proceedings,chendaniely/scipy_proceedings,springcoil/euroscipy_proceedings,euroscipy/euroscipy_proceedings,mjklemm/euroscipy_proceedings,mikaem/euroscipy_proceedings,SepidehAlassi/euroscipy_proceedings,sbenthall/scipy_proceedings,katyhuff/scipy_proceedings,springcoil/euroscipy_proceedings,SepidehAlassi/euroscipy_proceedings,springcoil/euroscipy_proceedings,juhasch/euroscipy_proceedings,SepidehAlassi/euroscipy_proceedings,michaelpacer/scipy_proceedings,michaelpacer/scipy_proceedings,katyhuff/scipy_proceedings,Stewori/euroscipy_proceedings,mwcraig/scipy_proceedings,mwcraig/scipy_proceedings,mikaem/euroscipy_proceedings,mjklemm/euroscipy_proceedings,sbenthall/scipy_proceedings,mikaem/euroscipy_proceedings,helgee/euroscipy_proceedings,chendaniely/scipy_proceedings,juhasch/euroscipy_proceedings,euroscipy/euroscipy_proceedings,michaelpacer/scipy_proceedings,helgee/euroscipy_proceedings,Stewori/euroscipy_proceedings,chendaniely/scipy_proceedings,katyhuff/scipy_proceedings,Stewori/euroscipy_proceedings,mwcraig/scipy_proceedings,helgee/euroscipy_proceedings,mjklemm/euroscipy_proceedings,euroscipy/euroscipy_proceedings,dotsdl/scipy_proceedings,dotsdl/scipy_proceedings | #!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = '\n'.join([r'% PDF Standard Fonts',
r'\usepackage{mathptmx}',
r'\usepackage[scaled=.80]{helvet}',
r'\usepackage{courier}'],)
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
Update preamble: no indent on quote, smaller verbatim font size. | #!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = r'''
% These preamble commands are from build_paper.py
% PDF Standard Fonts
\usepackage{mathptmx}
\usepackage[scaled=.90]{helvet}
\usepackage{courier}
% Make verbatim environment smaller
\makeatletter
\g@addto@macro\@verbatim\footnotesize
\makeatother
\renewcommand{\quote}{}
'''
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
| <commit_before>#!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = '\n'.join([r'% PDF Standard Fonts',
r'\usepackage{mathptmx}',
r'\usepackage[scaled=.80]{helvet}',
r'\usepackage{courier}'],)
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
<commit_msg>Update preamble: no indent on quote, smaller verbatim font size.<commit_after> | #!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = r'''
% These preamble commands are from build_paper.py
% PDF Standard Fonts
\usepackage{mathptmx}
\usepackage[scaled=.90]{helvet}
\usepackage{courier}
% Make verbatim environment smaller
\makeatletter
\g@addto@macro\@verbatim\footnotesize
\makeatother
\renewcommand{\quote}{}
'''
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
| #!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = '\n'.join([r'% PDF Standard Fonts',
r'\usepackage{mathptmx}',
r'\usepackage[scaled=.80]{helvet}',
r'\usepackage{courier}'],)
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
Update preamble: no indent on quote, smaller verbatim font size.#!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = r'''
% These preamble commands are from build_paper.py
% PDF Standard Fonts
\usepackage{mathptmx}
\usepackage[scaled=.90]{helvet}
\usepackage{courier}
% Make verbatim environment smaller
\makeatletter
\g@addto@macro\@verbatim\footnotesize
\makeatother
\renewcommand{\quote}{}
'''
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
| <commit_before>#!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = '\n'.join([r'% PDF Standard Fonts',
r'\usepackage{mathptmx}',
r'\usepackage[scaled=.80]{helvet}',
r'\usepackage{courier}'],)
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
<commit_msg>Update preamble: no indent on quote, smaller verbatim font size.<commit_after>#!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = r'''
% These preamble commands are from build_paper.py
% PDF Standard Fonts
\usepackage{mathptmx}
\usepackage[scaled=.90]{helvet}
\usepackage{courier}
% Make verbatim environment smaller
\makeatletter
\g@addto@macro\@verbatim\footnotesize
\makeatother
\renewcommand{\quote}{}
'''
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
|
069d6085946a21c5e78621abf13fb60fd7eb4dcf | threadedcomments/migrations/0001_initial.py | threadedcomments/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
class Migration(migrations.Migration):
dependencies = [
('django_comments', '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('django_comments.comment',),
)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
if 'django.contrib.comments' in settings.INSTALLED_APPS:
BASE_APP = 'comments'
else:
BASE_APP = 'django_comments'
class Migration(migrations.Migration):
dependencies = [
(BASE_APP, '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('{base_app}.comment'.format(base_app=BASE_APP),),
)
]
| Fix Django 1.7 migration support | Fix Django 1.7 migration support
| Python | bsd-3-clause | HonzaKral/django-threadedcomments,HonzaKral/django-threadedcomments | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
class Migration(migrations.Migration):
dependencies = [
('django_comments', '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('django_comments.comment',),
)
]
Fix Django 1.7 migration support | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
if 'django.contrib.comments' in settings.INSTALLED_APPS:
BASE_APP = 'comments'
else:
BASE_APP = 'django_comments'
class Migration(migrations.Migration):
dependencies = [
(BASE_APP, '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('{base_app}.comment'.format(base_app=BASE_APP),),
)
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
class Migration(migrations.Migration):
dependencies = [
('django_comments', '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('django_comments.comment',),
)
]
<commit_msg>Fix Django 1.7 migration support<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
if 'django.contrib.comments' in settings.INSTALLED_APPS:
BASE_APP = 'comments'
else:
BASE_APP = 'django_comments'
class Migration(migrations.Migration):
dependencies = [
(BASE_APP, '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('{base_app}.comment'.format(base_app=BASE_APP),),
)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
class Migration(migrations.Migration):
dependencies = [
('django_comments', '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('django_comments.comment',),
)
]
Fix Django 1.7 migration support# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
if 'django.contrib.comments' in settings.INSTALLED_APPS:
BASE_APP = 'comments'
else:
BASE_APP = 'django_comments'
class Migration(migrations.Migration):
dependencies = [
(BASE_APP, '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('{base_app}.comment'.format(base_app=BASE_APP),),
)
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
class Migration(migrations.Migration):
dependencies = [
('django_comments', '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('django_comments.comment',),
)
]
<commit_msg>Fix Django 1.7 migration support<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import models, migrations, connection
import django.db.models.deletion
is_index = connection.vendor != 'mysql'
if 'django.contrib.comments' in settings.INSTALLED_APPS:
BASE_APP = 'comments'
else:
BASE_APP = 'django_comments'
class Migration(migrations.Migration):
dependencies = [
(BASE_APP, '__first__'),
]
operations = [
migrations.CreateModel(
name='ThreadedComment',
fields=[
('comment_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='django_comments.Comment')),
('title', models.TextField(verbose_name='Title', blank=True)),
('tree_path', models.CharField(verbose_name='Tree path', max_length=500, editable=False, db_index=is_index)),
('last_child', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, verbose_name='Last child', blank=True, to='threadedcomments.ThreadedComment', null=True)),
('parent', models.ForeignKey(related_name='children', default=None, blank=True, to='threadedcomments.ThreadedComment', null=True, verbose_name='Parent')),
],
options={
'ordering': ('tree_path',),
'db_table': 'threadedcomments_comment',
'verbose_name': 'Threaded comment',
'verbose_name_plural': 'Threaded comments',
},
bases=('{base_app}.comment'.format(base_app=BASE_APP),),
)
]
|
6f363525314ed4b4652f48278ab1bab221e40b40 | suasimageparser.py | suasimageparser.py | from SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
import cv2
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
| from SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
| Remove useless import of OpenCV | Remove useless import of OpenCV
| Python | mit | FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition | from SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
import cv2
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
Remove useless import of OpenCV | from SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
| <commit_before>from SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
import cv2
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
<commit_msg>Remove useless import of OpenCV<commit_after> | from SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
| from SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
import cv2
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
Remove useless import of OpenCVfrom SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
| <commit_before>from SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
import cv2
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
<commit_msg>Remove useless import of OpenCV<commit_after>from SUASImageParser import ImageParser
from SUASImageParser.utils.color import bcolors
from options import parseOptions
from options import getOption
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Ensuring image provided ------------------------
if getOption("image") == None:
print(bcolors.FAIL + "[Error]" + bcolors.ENDC + " Please provide an image to parse")
exit(0)
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
|
de3722bdd4e5261ffe2ffd6264134ed51c131075 | src/test_dll.py | src/test_dll.py | """Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
| """Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
def test_new_node(new_dll):
"""Test if new node is created."""
from dll import DoubleNode
node = DoubleNode(27)
assert node.previous is None and node.next is None and node.val == (27)
| Test for creating a new node | Test for creating a new node
| Python | mit | fordf/data-structures | """Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
Test for creating a new node | """Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
def test_new_node(new_dll):
"""Test if new node is created."""
from dll import DoubleNode
node = DoubleNode(27)
assert node.previous is None and node.next is None and node.val == (27)
| <commit_before>"""Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
<commit_msg>Test for creating a new node<commit_after> | """Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
def test_new_node(new_dll):
"""Test if new node is created."""
from dll import DoubleNode
node = DoubleNode(27)
assert node.previous is None and node.next is None and node.val == (27)
| """Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
Test for creating a new node"""Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
def test_new_node(new_dll):
"""Test if new node is created."""
from dll import DoubleNode
node = DoubleNode(27)
assert node.previous is None and node.next is None and node.val == (27)
| <commit_before>"""Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
<commit_msg>Test for creating a new node<commit_after>"""Testing dll.py."""
import pytest
@pytest.fixture
def new_dll():
"""Return empty dll."""
from dll import DoublyLinkedList
return DoublyLinkedList()
def test_init(new_dll):
"""Test initialization of empty doubly linked list."""
assert new_dll.head is None and new_dll.tail is None
def test_push(new_dll):
"""Test push to empty dll."""
new_dll.push(21)
assert new_dll.head.val == 21 and new_dll.tail is None
def test_new_node(new_dll):
"""Test if new node is created."""
from dll import DoubleNode
node = DoubleNode(27)
assert node.previous is None and node.next is None and node.val == (27)
|
f1afd87c3a13fe47321c242d3586b1fa670125df | stationspinner/accounting/models.py | stationspinner/accounting/models.py | from django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
| from django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
characterID = models.IntegerField(null=True)
corporationID = models.IntegerField(null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
| Make it easier to use corporate keys | Make it easier to use corporate keys
| Python | agpl-3.0 | kriberg/stationspinner,kriberg/stationspinner | from django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
Make it easier to use corporate keys | from django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
characterID = models.IntegerField(null=True)
corporationID = models.IntegerField(null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
| <commit_before>from django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
<commit_msg>Make it easier to use corporate keys<commit_after> | from django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
characterID = models.IntegerField(null=True)
corporationID = models.IntegerField(null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
| from django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
Make it easier to use corporate keysfrom django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
characterID = models.IntegerField(null=True)
corporationID = models.IntegerField(null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
| <commit_before>from django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
<commit_msg>Make it easier to use corporate keys<commit_after>from django.db import models
from django.contrib.auth.models import AbstractUser
from django_pgjson.fields import JsonField
class Capsuler(AbstractUser):
settings = JsonField(blank=True, default={})
def __unicode__(self):
return self.username
def get_active_keys(self):
return APIKey.objects.filter(owner=self, expired=False)
class APIKey(models.Model):
KEY_TYPES = (
('Account', 'Account'),
('Character', 'Character'),
('Corporation', 'Corporation')
)
name = models.CharField(max_length=100)
keyID = models.CharField(max_length=20)
vCode = models.CharField(max_length=128)
accessMask = models.IntegerField(null=True, editable=False)
type = models.CharField(max_length=11, choices=KEY_TYPES, editable=False, null=True)
expired = models.BooleanField(default=False, editable=False)
expires = models.DateTimeField(editable=False, null=True)
characterID = models.IntegerField(null=True)
corporationID = models.IntegerField(null=True)
owner = models.ForeignKey(Capsuler)
def __unicode__(self):
return self.name
class APIUpdate(models.Model):
service = models.CharField(max_length=100)
last_update = models.DateTimeField(auto_now=True)
apikey = models.ForeignKey(APIKey)
class Meta:
unique_together = ('service', 'apikey')
|
5b7cd2f62bb86658b6fce7503a4fab9238b8faa5 | channelguide/init.py | channelguide/init.py | """Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
| """Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
import django.db
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
# hack for the fact that django tries to rollback its non-existant
# connection when requests finish.
dispatcher.disconnect(django.db._rollback_on_exception,
signal=signals.got_request_exception)
| Fix for django trying to rollback connections on request exceptions. | Fix for django trying to rollback connections on request exceptions.
git-svn-id: 98eea730e22c7fb5f8b38c49248ce5c7e9bb5936@525 be7adf91-e322-0410-8f47-e6edb61c52aa
| Python | agpl-3.0 | kmshi/miroguide,kmshi/miroguide,kmshi/miroguide | """Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
Fix for django trying to rollback connections on request exceptions.
git-svn-id: 98eea730e22c7fb5f8b38c49248ce5c7e9bb5936@525 be7adf91-e322-0410-8f47-e6edb61c52aa | """Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
import django.db
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
# hack for the fact that django tries to rollback its non-existant
# connection when requests finish.
dispatcher.disconnect(django.db._rollback_on_exception,
signal=signals.got_request_exception)
| <commit_before>"""Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
<commit_msg>Fix for django trying to rollback connections on request exceptions.
git-svn-id: 98eea730e22c7fb5f8b38c49248ce5c7e9bb5936@525 be7adf91-e322-0410-8f47-e6edb61c52aa<commit_after> | """Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
import django.db
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
# hack for the fact that django tries to rollback its non-existant
# connection when requests finish.
dispatcher.disconnect(django.db._rollback_on_exception,
signal=signals.got_request_exception)
| """Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
Fix for django trying to rollback connections on request exceptions.
git-svn-id: 98eea730e22c7fb5f8b38c49248ce5c7e9bb5936@525 be7adf91-e322-0410-8f47-e6edb61c52aa"""Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
import django.db
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
# hack for the fact that django tries to rollback its non-existant
# connection when requests finish.
dispatcher.disconnect(django.db._rollback_on_exception,
signal=signals.got_request_exception)
| <commit_before>"""Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
<commit_msg>Fix for django trying to rollback connections on request exceptions.
git-svn-id: 98eea730e22c7fb5f8b38c49248ce5c7e9bb5936@525 be7adf91-e322-0410-8f47-e6edb61c52aa<commit_after>"""Contains code needed to initialize channelguide. This should be run at
startup, before any real work starts.
"""
import locale
import logging
import logging.handlers
import random
import os
import sys
import traceback
from django.conf import settings
from django.core import signals
from django.dispatch import dispatcher
import django.db
def init_logging():
logger = logging.getLogger()
logger.setLevel(logging.INFO)
log_path = os.path.join(settings.SITE_DIR, 'log', 'cg.log')
handler = logging.handlers.RotatingFileHandler(log_path, maxBytes=2**20)
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
logger.addHandler(handler)
def init_external_libraries():
sys.path.insert(0, settings.EXTERNAL_LIBRARY_DIR)
def initialize():
init_logging()
init_external_libraries()
random.seed()
locale.setlocale(locale.LC_ALL, '')
# hack for the fact that django tries to rollback its non-existant
# connection when requests finish.
dispatcher.disconnect(django.db._rollback_on_exception,
signal=signals.got_request_exception)
|
43c2d1ac03234043aa5536f900fce72d593f3bba | lib/speedway.py | lib/speedway.py | #!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
| #!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
| Append newline after 'COMMIT' in iptables policies. Without newline, the iptables-restore command complains. | Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains.
| Python | apache-2.0 | jayceechou/capirca,pettai/capirca,azet/capirca,google/capirca,jayceechou/capirca,jethrogb/capirca,s3u/capirca,sneakywombat/capirca,s3u/capirca,ryantierney513/capirca,rarcotvmw/capirca,google/capirca,rarcotvmw/capirca,sneakywombat/capirca,ryantierney513/capirca,google/capirca,pettai/capirca,dagmartin/capirca,ryantierney513/capirca,dagmartin/capirca,azet/capirca,sneakywombat/capirca,rarcotvmw/capirca,jethrogb/capirca,google/capirca | #!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains. | #!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
| <commit_before>#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
<commit_msg>Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains.<commit_after> | #!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
| #!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains.#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
| <commit_before>#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
<commit_msg>Append newline after 'COMMIT' in iptables policies.
Without newline, the iptables-restore command complains.<commit_after>#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Speedway iptables generator.
This is a subclass of Iptables library. The primary difference is
that this library produced 'iptable-restore' compatible output."""
__author__ = 'watson@google.com (Tony Watson)'
import iptables
class Term(iptables.Term):
"""Generate Iptables policy terms."""
_PLATFORM = 'speedway'
_PREJUMP_FORMAT = None
_POSTJUMP_FORMAT = '-A %s -j %s'
class Speedway(iptables.Iptables):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'speedway'
_DEFAULT_PROTOCOL = 'all'
_SUFFIX = '.ipt'
_RENDER_PREFIX = '*filter'
_RENDER_SUFFIX = 'COMMIT\n'
_DEFAULTACTION_FORMAT = ':%s %s'
_TERM = Term
|
721f6f7916d698f22c9d96ce52cce3773fa514cc | uwsgiplugin.py | uwsgiplugin.py | import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/plugin.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs" % (base_path, base_path)) != 0:
os._exit(1)
| import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("cargo build --release") != 0:
os._exit(1)
# To also build the example app:
#os.system("cargo build --release --manifest-path examples/Cargo.toml")
| Update script to build rust code via cargo | Update script to build rust code via cargo
Signed-off-by: Luca Bruno <d11e81b0438fe9a6fbb85b72e5bb4c36a65f49c7@debian.org>
| Python | mit | unbit/uwsgi-rust,unbit/uwsgi-rust,unbit/uwsgi-rust | import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/plugin.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs" % (base_path, base_path)) != 0:
os._exit(1)
Update script to build rust code via cargo
Signed-off-by: Luca Bruno <d11e81b0438fe9a6fbb85b72e5bb4c36a65f49c7@debian.org> | import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("cargo build --release") != 0:
os._exit(1)
# To also build the example app:
#os.system("cargo build --release --manifest-path examples/Cargo.toml")
| <commit_before>import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/plugin.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs" % (base_path, base_path)) != 0:
os._exit(1)
<commit_msg>Update script to build rust code via cargo
Signed-off-by: Luca Bruno <d11e81b0438fe9a6fbb85b72e5bb4c36a65f49c7@debian.org><commit_after> | import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("cargo build --release") != 0:
os._exit(1)
# To also build the example app:
#os.system("cargo build --release --manifest-path examples/Cargo.toml")
| import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/plugin.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs" % (base_path, base_path)) != 0:
os._exit(1)
Update script to build rust code via cargo
Signed-off-by: Luca Bruno <d11e81b0438fe9a6fbb85b72e5bb4c36a65f49c7@debian.org>import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("cargo build --release") != 0:
os._exit(1)
# To also build the example app:
#os.system("cargo build --release --manifest-path examples/Cargo.toml")
| <commit_before>import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/plugin.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs" % (base_path, base_path)) != 0:
os._exit(1)
<commit_msg>Update script to build rust code via cargo
Signed-off-by: Luca Bruno <d11e81b0438fe9a6fbb85b72e5bb4c36a65f49c7@debian.org><commit_after>import os
import os.path
import inspect
base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])
NAME = 'rust'
GCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]
CFLAGS = []
if os.uname()[0] == 'Darwin':
CFLAGS.append('-mmacosx-version-min=10.7')
if os.system("cargo build --release") != 0:
os._exit(1)
# To also build the example app:
#os.system("cargo build --release --manifest-path examples/Cargo.toml")
|
692141042bd21bfd7d72567bdabf080304a48474 | planner/admin.py | planner/admin.py | from django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
model = RoadTrip
list_display = ('route', 'start_date', 'end_date',)
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
| from django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
def route__waypoints(self, obj):
return obj.route.waypoints.all()
model = RoadTrip
list_display = ('route', 'start_date', 'end_date', 'route__waypoints')
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
| Add route waypoints to list display in Admin interface | Add route waypoints to list display in Admin interface
| Python | apache-2.0 | jwarren116/RoadTrip,jwarren116/RoadTrip,jwarren116/RoadTrip | from django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
model = RoadTrip
list_display = ('route', 'start_date', 'end_date',)
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
Add route waypoints to list display in Admin interface | from django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
def route__waypoints(self, obj):
return obj.route.waypoints.all()
model = RoadTrip
list_display = ('route', 'start_date', 'end_date', 'route__waypoints')
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
| <commit_before>from django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
model = RoadTrip
list_display = ('route', 'start_date', 'end_date',)
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
<commit_msg>Add route waypoints to list display in Admin interface<commit_after> | from django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
def route__waypoints(self, obj):
return obj.route.waypoints.all()
model = RoadTrip
list_display = ('route', 'start_date', 'end_date', 'route__waypoints')
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
| from django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
model = RoadTrip
list_display = ('route', 'start_date', 'end_date',)
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
Add route waypoints to list display in Admin interfacefrom django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
def route__waypoints(self, obj):
return obj.route.waypoints.all()
model = RoadTrip
list_display = ('route', 'start_date', 'end_date', 'route__waypoints')
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
| <commit_before>from django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
model = RoadTrip
list_display = ('route', 'start_date', 'end_date',)
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
<commit_msg>Add route waypoints to list display in Admin interface<commit_after>from django.contrib import admin
from planner.models import Route, Waypoint, RoadTrip, TripDetail
class WaypointInline(admin.StackedInline):
model = Waypoint
extra = 1
class RouteAdmin(admin.ModelAdmin):
model = Route
inlines = [WaypointInline]
class RoadTripAdmin(admin.ModelAdmin):
def route__waypoints(self, obj):
return obj.route.waypoints.all()
model = RoadTrip
list_display = ('route', 'start_date', 'end_date', 'route__waypoints')
class TripDetailAdmin(admin.ModelAdmin):
model = TripDetail
list_display = ('destination',)
admin.site.register(Route, RouteAdmin)
admin.site.register(Waypoint)
admin.site.register(TripDetail)
admin.site.register(RoadTrip, RoadTripAdmin)
|
8351b73693019360c3f0ea3c60531ac13bef1c24 | structure/models.py | structure/models.py | from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.username
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
| from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
name = models.CharField(_('Name'), max_length=40)
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.name
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
| Add name to User model. | Add name to User model.
| Python | bsd-3-clause | RocknRoot/LIIT | from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.username
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
Add name to User model. | from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
name = models.CharField(_('Name'), max_length=40)
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.name
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
| <commit_before>from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.username
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
<commit_msg>Add name to User model.<commit_after> | from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
name = models.CharField(_('Name'), max_length=40)
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.name
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
| from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.username
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
Add name to User model.from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
name = models.CharField(_('Name'), max_length=40)
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.name
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
| <commit_before>from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.username
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
<commit_msg>Add name to User model.<commit_after>from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
class Organization(models.Model):
name = models.CharField(_('Name'), max_length=80)
slug = models.SlugField()
def __unicode__(self):
return self.name
class Team(models.Model):
name = models.CharField(_('Name'), max_length=80)
organization = models.ForeignKey('Organization')
def __unicode__(self):
return self.name
class User(AbstractUser):
name = models.CharField(_('Name'), max_length=40)
teams = models.ManyToManyField(Team, blank=True, related_name='users')
def __unicode__(self):
return self.name
class Contract(models.Model):
name = models.CharField(_('Name'), max_length=150)
def __unicode__(self):
return self.name
class ContractOrganization(models.Model):
contract = models.ForeignKey('Contract')
organization = models.ForeignKey('Organization')
default_team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.organization.name
class Meta:
verbose_name = _('Contracted organization')
verbose_name_plural = _('Contracted organizations')
class ContractTeam(models.Model):
contract = models.ForeignKey('Contract')
team = models.ForeignKey('Team')
def __unicode__(self):
return self.contract.name + ' - ' + self.team.name
class Meta:
verbose_name = _('Contracted team')
verbose_name_plural = _('Contracted teams')
|
38aed64e1c20d25a6bda750a096a513b7d414c45 | websod/views.py | websod/views.py | from werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
@expose('/')
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
| from werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/')
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
| Set integrations for the index page now | Set integrations for the index page now
| Python | mit | schettino72/serveronduty | from werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
@expose('/')
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
Set integrations for the index page now | from werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/')
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
| <commit_before>from werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
@expose('/')
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
<commit_msg>Set integrations for the index page now<commit_after> | from werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/')
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
| from werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
@expose('/')
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
Set integrations for the index page nowfrom werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/')
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
| <commit_before>from werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
@expose('/')
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
<commit_msg>Set integrations for the index page now<commit_after>from werkzeug import redirect
from werkzeug.exceptions import NotFound
from websod.utils import session, expose, url_for, serve_template
from websod.models import Integration
from datetime import timedelta, datetime
def home(request):
# show results from last 3 days
integrations_from = datetime.now() + timedelta(days=-3)
from_str = integrations_from.strftime("%Y-%m-%d 00:00:00")
latest_integrations = session.query(Integration).\
filter("started > '%s'" % from_str).\
order_by(Integration.started.desc()).all()
return serve_template('home.html', latest_integrations=latest_integrations)
@expose('/integration/<int:id>')
def integration(request, id):
integration = session.query(Integration).get(id)
return serve_template('integration.html', integration=integration)
@expose('/')
@expose('/integration/')
def integration_list(request):
integrations = session.query(Integration).all()
return serve_template('integration_list.html', integrations=integrations)
|
ccb4c2780c0c9acc36d73acc32f8867f0bd2b944 | pywikibot/families/wikivoyage_family.py | pywikibot/families/wikivoyage_family.py | # -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
| # -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi', 'bn',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
| Add Wikivoyage Bengali to pywikibot | Add Wikivoyage Bengali to pywikibot
Bug: T196363
Change-Id: Ifc49b3b3734eaac20ef4f909c83d68b11bc8d91d
| Python | mit | wikimedia/pywikibot-core,PersianWikipedia/pywikibot-core,wikimedia/pywikibot-core | # -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
Add Wikivoyage Bengali to pywikibot
Bug: T196363
Change-Id: Ifc49b3b3734eaac20ef4f909c83d68b11bc8d91d | # -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi', 'bn',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
| <commit_before># -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
<commit_msg>Add Wikivoyage Bengali to pywikibot
Bug: T196363
Change-Id: Ifc49b3b3734eaac20ef4f909c83d68b11bc8d91d<commit_after> | # -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi', 'bn',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
| # -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
Add Wikivoyage Bengali to pywikibot
Bug: T196363
Change-Id: Ifc49b3b3734eaac20ef4f909c83d68b11bc8d91d# -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi', 'bn',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
| <commit_before># -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
<commit_msg>Add Wikivoyage Bengali to pywikibot
Bug: T196363
Change-Id: Ifc49b3b3734eaac20ef4f909c83d68b11bc8d91d<commit_after># -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2018
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'zh', 'es',
'he', 'fi', 'vi', 'sv', 'el', 'ro', 'uk', 'hi', 'bn',
]
category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
|
988d66f748adc781f81929007e8971eadf3aafb5 | tests/models.py | tests/models.py | from django.db import models
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
| import logging
from django.db import models
class MockHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.reset()
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
self.messages = {
'debug': [],
'info': [],
'warning': [],
'error': [],
'critical': [],
}
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
| Add MockHandler for future testing of logging | Add MockHandler for future testing of logging
| Python | bsd-2-clause | chop-dbhi/serrano,rv816/serrano_night,rv816/serrano_night,chop-dbhi/serrano | from django.db import models
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
Add MockHandler for future testing of logging | import logging
from django.db import models
class MockHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.reset()
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
self.messages = {
'debug': [],
'info': [],
'warning': [],
'error': [],
'critical': [],
}
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
| <commit_before>from django.db import models
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
<commit_msg>Add MockHandler for future testing of logging<commit_after> | import logging
from django.db import models
class MockHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.reset()
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
self.messages = {
'debug': [],
'info': [],
'warning': [],
'error': [],
'critical': [],
}
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
| from django.db import models
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
Add MockHandler for future testing of loggingimport logging
from django.db import models
class MockHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.reset()
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
self.messages = {
'debug': [],
'info': [],
'warning': [],
'error': [],
'critical': [],
}
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
| <commit_before>from django.db import models
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
<commit_msg>Add MockHandler for future testing of logging<commit_after>import logging
from django.db import models
class MockHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.reset()
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
self.messages = {
'debug': [],
'info': [],
'warning': [],
'error': [],
'critical': [],
}
class Office(models.Model):
location = models.CharField(max_length=50)
class Title(models.Model):
name = models.CharField(max_length=50)
salary = models.IntegerField(null=True)
boss = models.NullBooleanField(default=False)
class Employee(models.Model):
first_name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
title = models.ForeignKey(Title, null=True)
office = models.ForeignKey(Office)
is_manager = models.NullBooleanField(default=False)
class Meeting(models.Model):
attendees = models.ManyToManyField(Employee)
office = models.ForeignKey(Office)
start_time = models.DateTimeField()
end_time = models.DateTimeField(null=True)
class Project(models.Model):
name = models.CharField(max_length=50)
employees = models.ManyToManyField(Employee)
manager = models.OneToOneField(Employee, related_name='managed_projects')
due_date = models.DateField(null=True)
|
291d6c51d545cb46117ff25a5a01da8e08e78127 | ynr/apps/sopn_parsing/management/commands/sopn_parsing_extract_tables.py | ynr/apps/sopn_parsing/management/commands/sopn_parsing_extract_tables.py | from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
qs = qs.exclude(officialdocument__relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
| from django.db.models import OuterRef, Subquery
from official_documents.models import OfficialDocument
from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
# It is possible for an a ballot to have more than one
# OfficialDocument so we need to get the latest one to check
# that we know which pages to parse tables from
latest_sopns = OfficialDocument.objects.filter(
ballot=OuterRef("pk")
).order_by("-created")
qs = qs.annotate(
sopn_relevant_pages=Subquery(
latest_sopns.values("relevant_pages")[:1]
)
)
qs = qs.exclude(sopn_relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
| Fix query to exclude objects without relevant pages | Fix query to exclude objects without relevant pages
| Python | agpl-3.0 | DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative | from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
qs = qs.exclude(officialdocument__relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
Fix query to exclude objects without relevant pages | from django.db.models import OuterRef, Subquery
from official_documents.models import OfficialDocument
from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
# It is possible for an a ballot to have more than one
# OfficialDocument so we need to get the latest one to check
# that we know which pages to parse tables from
latest_sopns = OfficialDocument.objects.filter(
ballot=OuterRef("pk")
).order_by("-created")
qs = qs.annotate(
sopn_relevant_pages=Subquery(
latest_sopns.values("relevant_pages")[:1]
)
)
qs = qs.exclude(sopn_relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
| <commit_before>from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
qs = qs.exclude(officialdocument__relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
<commit_msg>Fix query to exclude objects without relevant pages<commit_after> | from django.db.models import OuterRef, Subquery
from official_documents.models import OfficialDocument
from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
# It is possible for an a ballot to have more than one
# OfficialDocument so we need to get the latest one to check
# that we know which pages to parse tables from
latest_sopns = OfficialDocument.objects.filter(
ballot=OuterRef("pk")
).order_by("-created")
qs = qs.annotate(
sopn_relevant_pages=Subquery(
latest_sopns.values("relevant_pages")[:1]
)
)
qs = qs.exclude(sopn_relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
| from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
qs = qs.exclude(officialdocument__relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
Fix query to exclude objects without relevant pagesfrom django.db.models import OuterRef, Subquery
from official_documents.models import OfficialDocument
from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
# It is possible for an a ballot to have more than one
# OfficialDocument so we need to get the latest one to check
# that we know which pages to parse tables from
latest_sopns = OfficialDocument.objects.filter(
ballot=OuterRef("pk")
).order_by("-created")
qs = qs.annotate(
sopn_relevant_pages=Subquery(
latest_sopns.values("relevant_pages")[:1]
)
)
qs = qs.exclude(sopn_relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
| <commit_before>from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
qs = qs.exclude(officialdocument__relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
<commit_msg>Fix query to exclude objects without relevant pages<commit_after>from django.db.models import OuterRef, Subquery
from official_documents.models import OfficialDocument
from sopn_parsing.helpers.command_helpers import BaseSOPNParsingCommand
from sopn_parsing.helpers.extract_tables import extract_ballot_table
from sopn_parsing.helpers.text_helpers import NoTextInDocumentError
class Command(BaseSOPNParsingCommand):
help = """
Parse tables out of PDFs in to ParsedSOPN models for later parsing.
"""
def handle(self, *args, **options):
qs = self.get_queryset(options)
filter_kwargs = {}
if not options["ballot"] and not options["testing"]:
if not options["reparse"]:
filter_kwargs["officialdocument__parsedsopn"] = None
qs = qs.filter(**filter_kwargs)
# We can't extract tables when we don't know about the pages
# It is possible for an a ballot to have more than one
# OfficialDocument so we need to get the latest one to check
# that we know which pages to parse tables from
latest_sopns = OfficialDocument.objects.filter(
ballot=OuterRef("pk")
).order_by("-created")
qs = qs.annotate(
sopn_relevant_pages=Subquery(
latest_sopns.values("relevant_pages")[:1]
)
)
qs = qs.exclude(sopn_relevant_pages="")
for ballot in qs:
try:
extract_ballot_table(ballot)
except NoTextInDocumentError:
self.stdout.write(
f"{ballot} raised a NoTextInDocumentError trying to extract tables"
)
except ValueError:
self.stdout.write(
f"{ballot} raised a ValueError trying extract tables"
)
|
f812fa8f0df0f3b8c8bb56f446dd01f64cff5cae | wapps/migrations/0016_auto_20161024_0925.py | wapps/migrations/0016_auto_20161024_0925.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wapps.WappsImage', verbose_name='Mobile Logo'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from wapps.utils import get_image_model
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=get_image_model(), verbose_name='Mobile Logo'),
),
]
| Fix image model in migration | Fix image model in migration
| Python | mit | apihackers/wapps,apihackers/wapps,apihackers/wapps,apihackers/wapps | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wapps.WappsImage', verbose_name='Mobile Logo'),
),
]
Fix image model in migration | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from wapps.utils import get_image_model
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=get_image_model(), verbose_name='Mobile Logo'),
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wapps.WappsImage', verbose_name='Mobile Logo'),
),
]
<commit_msg>Fix image model in migration<commit_after> | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from wapps.utils import get_image_model
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=get_image_model(), verbose_name='Mobile Logo'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wapps.WappsImage', verbose_name='Mobile Logo'),
),
]
Fix image model in migration# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from wapps.utils import get_image_model
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=get_image_model(), verbose_name='Mobile Logo'),
),
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wapps.WappsImage', verbose_name='Mobile Logo'),
),
]
<commit_msg>Fix image model in migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from wapps.utils import get_image_model
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
field=models.ForeignKey(blank=True, help_text='An mobile optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=get_image_model(), verbose_name='Mobile Logo'),
),
]
|
7d2d94d69797586860f7bb8c21a0b0e217fbc394 | components/mgmtworker/scripts/start.py | components/mgmtworker/scripts/start.py | #!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running(amqp_url):
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
CELERY_PATH,
'-b', celery_amqp_url,
'--app=cloudify_agent.app.app',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
celery_amqp_url = ('amqp://{rabbitmq_username}:{rabbitmq_password}@'
'{rabbitmq_endpoint_ip}:{broker_port}//').format(
**ctx.instance.runtime_properties)
try:
check_worker_running(celery_amqp_url)
except ValueError:
ctx.abort_operation('Celery worker failed to start')
| #!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running():
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
'CELERY_WORK_DIR=/opt/mgmtworker/work',
CELERY_PATH,
'--config=cloudify.broker_config',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
try:
check_worker_running()
except ValueError:
ctx.abort_operation('Celery worker failed to start')
| Use the stored broker_config instead of working it out ourselves | Use the stored broker_config instead of working it out ourselves
Fixes SSL
Means we're verifying the config is written properly too
| Python | apache-2.0 | isaac-s/cloudify-manager-blueprints,Cloudify-PS/cloudify-manager-blueprints,cloudify-cosmo/cloudify-manager-blueprints,cloudify-cosmo/cloudify-manager-blueprints,isaac-s/cloudify-manager-blueprints,Cloudify-PS/cloudify-manager-blueprints | #!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running(amqp_url):
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
CELERY_PATH,
'-b', celery_amqp_url,
'--app=cloudify_agent.app.app',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
celery_amqp_url = ('amqp://{rabbitmq_username}:{rabbitmq_password}@'
'{rabbitmq_endpoint_ip}:{broker_port}//').format(
**ctx.instance.runtime_properties)
try:
check_worker_running(celery_amqp_url)
except ValueError:
ctx.abort_operation('Celery worker failed to start')
Use the stored broker_config instead of working it out ourselves
Fixes SSL
Means we're verifying the config is written properly too | #!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running():
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
'CELERY_WORK_DIR=/opt/mgmtworker/work',
CELERY_PATH,
'--config=cloudify.broker_config',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
try:
check_worker_running()
except ValueError:
ctx.abort_operation('Celery worker failed to start')
| <commit_before>#!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running(amqp_url):
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
CELERY_PATH,
'-b', celery_amqp_url,
'--app=cloudify_agent.app.app',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
celery_amqp_url = ('amqp://{rabbitmq_username}:{rabbitmq_password}@'
'{rabbitmq_endpoint_ip}:{broker_port}//').format(
**ctx.instance.runtime_properties)
try:
check_worker_running(celery_amqp_url)
except ValueError:
ctx.abort_operation('Celery worker failed to start')
<commit_msg>Use the stored broker_config instead of working it out ourselves
Fixes SSL
Means we're verifying the config is written properly too<commit_after> | #!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running():
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
'CELERY_WORK_DIR=/opt/mgmtworker/work',
CELERY_PATH,
'--config=cloudify.broker_config',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
try:
check_worker_running()
except ValueError:
ctx.abort_operation('Celery worker failed to start')
| #!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running(amqp_url):
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
CELERY_PATH,
'-b', celery_amqp_url,
'--app=cloudify_agent.app.app',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
celery_amqp_url = ('amqp://{rabbitmq_username}:{rabbitmq_password}@'
'{rabbitmq_endpoint_ip}:{broker_port}//').format(
**ctx.instance.runtime_properties)
try:
check_worker_running(celery_amqp_url)
except ValueError:
ctx.abort_operation('Celery worker failed to start')
Use the stored broker_config instead of working it out ourselves
Fixes SSL
Means we're verifying the config is written properly too#!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running():
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
'CELERY_WORK_DIR=/opt/mgmtworker/work',
CELERY_PATH,
'--config=cloudify.broker_config',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
try:
check_worker_running()
except ValueError:
ctx.abort_operation('Celery worker failed to start')
| <commit_before>#!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running(amqp_url):
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
CELERY_PATH,
'-b', celery_amqp_url,
'--app=cloudify_agent.app.app',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
celery_amqp_url = ('amqp://{rabbitmq_username}:{rabbitmq_password}@'
'{rabbitmq_endpoint_ip}:{broker_port}//').format(
**ctx.instance.runtime_properties)
try:
check_worker_running(celery_amqp_url)
except ValueError:
ctx.abort_operation('Celery worker failed to start')
<commit_msg>Use the stored broker_config instead of working it out ourselves
Fixes SSL
Means we're verifying the config is written properly too<commit_after>#!/usr/bin/env python
from os.path import join, dirname
from cloudify import ctx
ctx.download_resource(
join('components', 'utils.py'),
join(dirname(__file__), 'utils.py'))
import utils # NOQA
MGMT_WORKER_SERVICE_NAME = 'mgmtworker'
CELERY_PATH = '/opt/mgmtworker/env/bin/celery' # also hardcoded in create
@utils.retry(ValueError)
def check_worker_running():
"""Use `celery status` to check if the worker is running."""
result = utils.sudo([
'CELERY_WORK_DIR=/opt/mgmtworker/work',
CELERY_PATH,
'--config=cloudify.broker_config',
'status'
], ignore_failures=True)
if result.returncode != 0:
raise ValueError('celery status: worker not running')
ctx.logger.info('Starting Management Worker Service...')
utils.start_service(MGMT_WORKER_SERVICE_NAME)
utils.systemd.verify_alive(MGMT_WORKER_SERVICE_NAME)
try:
check_worker_running()
except ValueError:
ctx.abort_operation('Celery worker failed to start')
|
31bea815589e018c9a6250f1a8efcdf8cecfef7d | tests/test_settings.py | tests/test_settings.py | DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
| DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
APPEND_SLASH = False
| Disable append_slash in test config | Disable append_slash in test config
This interferes with the ability to test proxying downstream redirects
| Python | mit | thomasw/djproxy | DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
Disable append_slash in test config
This interferes with the ability to test proxying downstream redirects | DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
APPEND_SLASH = False
| <commit_before>DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
<commit_msg>Disable append_slash in test config
This interferes with the ability to test proxying downstream redirects<commit_after> | DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
APPEND_SLASH = False
| DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
Disable append_slash in test config
This interferes with the ability to test proxying downstream redirectsDEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
APPEND_SLASH = False
| <commit_before>DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
<commit_msg>Disable append_slash in test config
This interferes with the ability to test proxying downstream redirects<commit_after>DEBUG = True
TEMPLATE_DEBUG = DEBUG
SECRET_KEY = 'fake_secret'
ROOT_URLCONF = 'tests.test_urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'irrelevant.db'
}
}
INSTALLED_APPS = (
'djproxy',
)
STATIC_ROOT = ''
STATIC_URL = '/'
APPEND_SLASH = False
|
d6fcedb2b479da20cbe061bcba52758a18a6ed0b | noweats/__init__.py | noweats/__init__.py | """
The NowEats application scrapes Twitter for what people are eating now.
"""
| """
The NowEats application scrapes Twitter for what people are eating now.
"""
import analysis
import collection
import extraction
| Add module imports to noweats init. | Add module imports to noweats init.
| Python | mit | blr246/noweats,blr246/noweats | """
The NowEats application scrapes Twitter for what people are eating now.
"""
Add module imports to noweats init. | """
The NowEats application scrapes Twitter for what people are eating now.
"""
import analysis
import collection
import extraction
| <commit_before>"""
The NowEats application scrapes Twitter for what people are eating now.
"""
<commit_msg>Add module imports to noweats init.<commit_after> | """
The NowEats application scrapes Twitter for what people are eating now.
"""
import analysis
import collection
import extraction
| """
The NowEats application scrapes Twitter for what people are eating now.
"""
Add module imports to noweats init."""
The NowEats application scrapes Twitter for what people are eating now.
"""
import analysis
import collection
import extraction
| <commit_before>"""
The NowEats application scrapes Twitter for what people are eating now.
"""
<commit_msg>Add module imports to noweats init.<commit_after>"""
The NowEats application scrapes Twitter for what people are eating now.
"""
import analysis
import collection
import extraction
|
ed97a1f811f04693203f6d1c0e9b64649a3da152 | coney/exceptions.py | coney/exceptions.py |
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
|
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
class HandlerNotCallableException(ConeyException):
def __repr__(self):
return 'Handler provided a non-callable object'
| Add a new exception to handle a non-callable handler. | Add a new exception to handle a non-callable handler.
| Python | mit | cbigler/jackrabbit |
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
Add a new exception to handle a non-callable handler. |
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
class HandlerNotCallableException(ConeyException):
def __repr__(self):
return 'Handler provided a non-callable object'
| <commit_before>
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
<commit_msg>Add a new exception to handle a non-callable handler.<commit_after> |
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
class HandlerNotCallableException(ConeyException):
def __repr__(self):
return 'Handler provided a non-callable object'
|
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
Add a new exception to handle a non-callable handler.
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
class HandlerNotCallableException(ConeyException):
def __repr__(self):
return 'Handler provided a non-callable object'
| <commit_before>
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
<commit_msg>Add a new exception to handle a non-callable handler.<commit_after>
class ConeyException(Exception):
def __repr__(self):
return 'An unspecified error has occurred'
class CallTimeoutException(ConeyException):
def __repr__(self):
return 'An RPC call did not return before the time out period'
class MalformedRequestException(ConeyException):
def __init__(self, serializer_name, request):
self._serializer_name = serializer_name
self._request = request
def __repr__(self):
return '{} failed to create a Request from string: {}'.format(self._serialier_name, self._request)
class RemoteExecErrorException(ConeyException):
def __init__(self, value, details):
self._value = value
self._details = details
def __repr__(self):
return 'An error occurred during remote execution: ({}) {}'.format(self._value, self._details)
@property
def value(self):
return self._value
@property
def details(self):
return self._details
class RemoteUnhandledExceptionException(ConeyException):
def __init__(self, details):
self._details = details
def __repr__(self):
return 'An unhandled exception was raised during remote execution: {}'.format(self._details)
class DispatchHandlerException(ConeyException):
def __init__(self, code):
self.code = code
def __repr__(self):
return 'Error {} occurred during message dispatch'.format(self.code)
class HandlerNotCallableException(ConeyException):
def __repr__(self):
return 'Handler provided a non-callable object'
|
80ea1fd6dc5ad47a3689f64ebe6e639f037f7d20 | ln/backend/reduction.py | ln/backend/reduction.py | '''Functions that perform the different reduction strategies.'''
import numpy as np
def closest(times, values, center_time):
abs_delta = np.abs(np.array([(t - center_time).total_seconds()
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
| '''Functions that perform the different reduction strategies.'''
# Needed for get_total_seconds() implementation
from __future__ import division
import numpy as np
# Implementation for python 2.6
def get_total_sections(td):
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
def closest(times, values, center_time):
abs_delta = np.abs(np.array([get_total_sections(t - center_time)
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
| Fix for Python 2.6 that lacks timedelta.total_seconds() | Fix for Python 2.6 that lacks timedelta.total_seconds()
| Python | bsd-2-clause | seibert/ln | '''Functions that perform the different reduction strategies.'''
import numpy as np
def closest(times, values, center_time):
abs_delta = np.abs(np.array([(t - center_time).total_seconds()
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
Fix for Python 2.6 that lacks timedelta.total_seconds() | '''Functions that perform the different reduction strategies.'''
# Needed for get_total_seconds() implementation
from __future__ import division
import numpy as np
# Implementation for python 2.6
def get_total_sections(td):
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
def closest(times, values, center_time):
abs_delta = np.abs(np.array([get_total_sections(t - center_time)
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
| <commit_before>'''Functions that perform the different reduction strategies.'''
import numpy as np
def closest(times, values, center_time):
abs_delta = np.abs(np.array([(t - center_time).total_seconds()
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
<commit_msg>Fix for Python 2.6 that lacks timedelta.total_seconds()<commit_after> | '''Functions that perform the different reduction strategies.'''
# Needed for get_total_seconds() implementation
from __future__ import division
import numpy as np
# Implementation for python 2.6
def get_total_sections(td):
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
def closest(times, values, center_time):
abs_delta = np.abs(np.array([get_total_sections(t - center_time)
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
| '''Functions that perform the different reduction strategies.'''
import numpy as np
def closest(times, values, center_time):
abs_delta = np.abs(np.array([(t - center_time).total_seconds()
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
Fix for Python 2.6 that lacks timedelta.total_seconds()'''Functions that perform the different reduction strategies.'''
# Needed for get_total_seconds() implementation
from __future__ import division
import numpy as np
# Implementation for python 2.6
def get_total_sections(td):
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
def closest(times, values, center_time):
abs_delta = np.abs(np.array([get_total_sections(t - center_time)
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
| <commit_before>'''Functions that perform the different reduction strategies.'''
import numpy as np
def closest(times, values, center_time):
abs_delta = np.abs(np.array([(t - center_time).total_seconds()
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
<commit_msg>Fix for Python 2.6 that lacks timedelta.total_seconds()<commit_after>'''Functions that perform the different reduction strategies.'''
# Needed for get_total_seconds() implementation
from __future__ import division
import numpy as np
# Implementation for python 2.6
def get_total_sections(td):
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
def closest(times, values, center_time):
abs_delta = np.abs(np.array([get_total_sections(t - center_time)
for t in times]))
closest_index = np.argmin(abs_delta)
return values[closest_index]
def sum(times, values, center_time):
return np.sum(values, axis=0)
def mean(times, values, center_time):
return np.mean(values, axis=0)
def min(times, values, center_time):
return np.amin(values, axis=0)
def max(times, values, center_time):
return np.amax(values, axis=0)
REDUCTIONS = dict(closest=closest, sum=sum, mean=mean, min=min, max=max)
|
113fc2bb39e872ae8e1fa4870d8b94f9375cec9e | daemon/__init__.py | daemon/__init__.py | # -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.2"
| # -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.3"
| Prepare development of new version. | Prepare development of new version. | Python | apache-2.0 | wting/python-daemon,eaufavor/python-daemon | # -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.2"
Prepare development of new version. | # -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.3"
| <commit_before># -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.2"
<commit_msg>Prepare development of new version.<commit_after> | # -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.3"
| # -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.2"
Prepare development of new version.# -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.3"
| <commit_before># -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.2"
<commit_msg>Prepare development of new version.<commit_after># -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.3"
|
52b6dac7528232dfd41841f4697c7a78e2a2e675 | www/src/Lib/_weakref.py | www/src/Lib/_weakref.py | class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
    """Storage cell pairing a referent with its finalizer callback.

    No real weak references are possible in this environment, so the
    referent is simply held strongly on the instance.
    """

    def __init__(self, obj, callback):
        self.callback = callback
        self.obj = obj
class ref:
    """Emulation of ``weakref.ref``.

    The target is stored (strongly) inside a ``ReferenceType`` cell.
    Fix: a ``weakref.ref`` must be *callable* and return its referent;
    this implementation previously lacked ``__call__`` entirely.
    """

    def __init__(self, obj, callback=None):
        # Wrap the referent in the cell type, mirroring CPython's layout.
        self.obj = ReferenceType(obj, callback)
        self.callback = callback

    def __call__(self):
        """Return the referent, as calling a real weak reference does."""
        return self.obj.obj
def getweakrefcount(obj):
    """Stub for ``weakref.getweakrefcount``: unconditionally reports 1."""
    return 1
def getweakrefs(obj):
    """Stub for ``weakref.getweakrefs``.

    NOTE(review): CPython returns a *list* of weak references; this stub
    returns *obj* itself — confirm no caller relies on list semantics.
    """
    return obj
def proxy(obj, callback=None):
    """Return a proxy object for *obj*.

    *callback* is accepted (and ignored, as before) so the signature
    matches ``weakref.proxy(object[, callback])``; it was previously a
    required positional argument even though it was never used.
    Backward compatible: existing two-argument calls still work.
    """
    return ProxyType(obj)
| class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
    """Storage cell pairing a referent with its finalizer callback;
    ``ref`` below keeps its target inside one of these."""
    def __init__(self,obj,callback):
        self.obj = obj
        self.callback = callback
class ref:
    """Emulation of ``weakref.ref``: wraps *obj* in a ``ReferenceType``
    cell and hands the referent back when the reference is called."""

    def __init__(self, obj, callback=None):
        self.callback = callback
        self.obj = ReferenceType(obj, callback)

    def __call__(self):
        # Calling a reference yields its referent, as with real weakrefs.
        cell = self.obj
        return cell.obj
def getweakrefcount(obj):
    """Stubbed weak-reference count: always exactly 1."""
    return 1
def getweakrefs(obj):
    """Stubbed ``getweakrefs``; returns *obj* itself rather than a list
    of references (NOTE(review): differs from CPython — verify callers)."""
    return obj
def proxy(obj, callback=None):
    """Return a proxy for *obj*.

    The *callback* parameter is never used; give it a default of ``None``
    so the signature lines up with ``weakref.proxy(object[, callback])``
    while remaining backward compatible with positional two-arg calls.
    """
    return ProxyType(obj)
| Add method __call__ to _weaksetref.WeakSet | Add method __call__ to _weaksetref.WeakSet
| Python | bsd-3-clause | olemis/brython,Lh4cKg/brython,molebot/brython,kikocorreoso/brython,Isendir/brython,Mozhuowen/brython,Isendir/brython,amrdraz/brython,Hasimir/brython,olemis/brython,firmlyjin/brython,JohnDenker/brython,olemis/brython,firmlyjin/brython,Mozhuowen/brython,jonathanverner/brython,molebot/brython,jonathanverner/brython,kevinmel2000/brython,Hasimir/brython,Lh4cKg/brython,JohnDenker/brython,firmlyjin/brython,firmlyjin/brython,Hasimir/brython,amrdraz/brython,Hasimir/brython,molebot/brython,kevinmel2000/brython,rubyinhell/brython,Mozhuowen/brython,Isendir/brython,kikocorreoso/brython,brython-dev/brython,rubyinhell/brython,Lh4cKg/brython,JohnDenker/brython,JohnDenker/brython,rubyinhell/brython,rubyinhell/brython,molebot/brython,jonathanverner/brython,kevinmel2000/brython,Lh4cKg/brython,kevinmel2000/brython,brython-dev/brython,kikocorreoso/brython,jonathanverner/brython,Mozhuowen/brython,firmlyjin/brython,amrdraz/brython,Isendir/brython,amrdraz/brython,olemis/brython,brython-dev/brython | class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
Add method __call__ to _weaksetref.WeakSet | class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def __call__(self):
return self.obj.obj
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
| <commit_before>class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
<commit_msg>Add method __call__ to _weaksetref.WeakSet<commit_after> | class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def __call__(self):
return self.obj.obj
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
| class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
Add method __call__ to _weaksetref.WeakSetclass ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def __call__(self):
return self.obj.obj
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
| <commit_before>class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
<commit_msg>Add method __call__ to _weaksetref.WeakSet<commit_after>class ProxyType:
def __init__(self,obj):
self.obj = obj
CallableProxyType = ProxyType
ProxyTypes = [ProxyType,CallableProxyType]
class ReferenceType:
def __init__(self,obj,callback):
self.obj = obj
self.callback = callback
class ref:
def __init__(self,obj,callback=None):
self.obj = ReferenceType(obj,callback)
self.callback=callback
def __call__(self):
return self.obj.obj
def getweakrefcount(obj):
return 1
def getweakrefs(obj):
return obj
def proxy(obj,callback):
return ProxyType(obj)
|
3948128a194a27171ec14ca664a77c125ec73fae | data/adjustIndex.py | data/adjustIndex.py | import csv
# Renumber the Run column of the experiment CSV: shift every index down
# by `start` and write the adjusted rows (headerless) to changed.csv.
start = 900

newcsv = []
with open('50_percent_cheaters.csv', 'rb') as csvfile:
    for record in csv.DictReader(csvfile):
        record['Run'] = int(record['Run']) - start
        newcsv.append(record)

with open('changed.csv', 'wb') as newfile:
    fieldnames = ["Run", "Time", "Status", "Solver", "Gas", "Verifiers"]
    csv.DictWriter(newfile, fieldnames).writerows(newcsv)
| import csv
# Offset added to every Run index when renumbering the experiment rows.
start = 690
# Adjusted rows, collected in input order.
newcsv = []
# NOTE(review): binary file modes ('rb'/'wb') with the csv module imply
# Python 2 — confirm before running under Python 3.
with open('70_percent_cheaters2.csv', 'rb') as csvfile:
    experiment50 = csv.DictReader(csvfile)
    for row in experiment50:
        row['Run'] = int(row['Run']) + start
        newcsv.append(row)
# Write the renumbered rows; no header row is emitted
# (DictWriter.writeheader is never called).
with open('changed.csv', 'wb') as newfile:
    fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
    writer = csv.DictWriter(newfile,fieldnames)
    writer.writerows(newcsv)
| Change dirty little helper script | Change dirty little helper script
| Python | mit | nud3l/smart-contract-analysis | import csv
start = 900
newcsv = []
with open('50_percent_cheaters.csv', 'rb') as csvfile:
experiment50 = csv.DictReader(csvfile)
for row in experiment50:
row['Run'] = int(row['Run']) - start
newcsv.append(row)
with open('changed.csv', 'wb') as newfile:
fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
writer = csv.DictWriter(newfile,fieldnames)
writer.writerows(newcsv)
Change dirty little helper script | import csv
start = 690
newcsv = []
with open('70_percent_cheaters2.csv', 'rb') as csvfile:
experiment50 = csv.DictReader(csvfile)
for row in experiment50:
row['Run'] = int(row['Run']) + start
newcsv.append(row)
with open('changed.csv', 'wb') as newfile:
fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
writer = csv.DictWriter(newfile,fieldnames)
writer.writerows(newcsv)
| <commit_before>import csv
start = 900
newcsv = []
with open('50_percent_cheaters.csv', 'rb') as csvfile:
experiment50 = csv.DictReader(csvfile)
for row in experiment50:
row['Run'] = int(row['Run']) - start
newcsv.append(row)
with open('changed.csv', 'wb') as newfile:
fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
writer = csv.DictWriter(newfile,fieldnames)
writer.writerows(newcsv)
<commit_msg>Change dirty little helper script<commit_after> | import csv
start = 690
newcsv = []
with open('70_percent_cheaters2.csv', 'rb') as csvfile:
experiment50 = csv.DictReader(csvfile)
for row in experiment50:
row['Run'] = int(row['Run']) + start
newcsv.append(row)
with open('changed.csv', 'wb') as newfile:
fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
writer = csv.DictWriter(newfile,fieldnames)
writer.writerows(newcsv)
| import csv
start = 900
newcsv = []
with open('50_percent_cheaters.csv', 'rb') as csvfile:
experiment50 = csv.DictReader(csvfile)
for row in experiment50:
row['Run'] = int(row['Run']) - start
newcsv.append(row)
with open('changed.csv', 'wb') as newfile:
fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
writer = csv.DictWriter(newfile,fieldnames)
writer.writerows(newcsv)
Change dirty little helper scriptimport csv
start = 690
newcsv = []
with open('70_percent_cheaters2.csv', 'rb') as csvfile:
experiment50 = csv.DictReader(csvfile)
for row in experiment50:
row['Run'] = int(row['Run']) + start
newcsv.append(row)
with open('changed.csv', 'wb') as newfile:
fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
writer = csv.DictWriter(newfile,fieldnames)
writer.writerows(newcsv)
| <commit_before>import csv
start = 900
newcsv = []
with open('50_percent_cheaters.csv', 'rb') as csvfile:
experiment50 = csv.DictReader(csvfile)
for row in experiment50:
row['Run'] = int(row['Run']) - start
newcsv.append(row)
with open('changed.csv', 'wb') as newfile:
fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
writer = csv.DictWriter(newfile,fieldnames)
writer.writerows(newcsv)
<commit_msg>Change dirty little helper script<commit_after>import csv
start = 690
newcsv = []
with open('70_percent_cheaters2.csv', 'rb') as csvfile:
experiment50 = csv.DictReader(csvfile)
for row in experiment50:
row['Run'] = int(row['Run']) + start
newcsv.append(row)
with open('changed.csv', 'wb') as newfile:
fieldnames = ["Run","Time","Status","Solver","Gas","Verifiers"]
writer = csv.DictWriter(newfile,fieldnames)
writer.writerows(newcsv)
|
7d3fd06884bc11b9e1c60250052a1abdb6e7d44d | pycassa/__init__.py | pycassa/__init__.py | from pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.connection import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
| from pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
| Remove connection from pycassa package import | Remove connection from pycassa package import
| Python | mit | pycassa/pycassa,pycassa/pycassa | from pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.connection import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
Remove connection from pycassa package import | from pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
| <commit_before>from pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.connection import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
<commit_msg>Remove connection from pycassa package import<commit_after> | from pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
| from pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.connection import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
Remove connection from pycassa package importfrom pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
| <commit_before>from pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.connection import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
<commit_msg>Remove connection from pycassa package import<commit_after>from pycassa.columnfamily import *
from pycassa.columnfamilymap import *
from pycassa.types import *
from pycassa.index import *
from pycassa.pool import *
from pycassa.system_manager import *
from pycassa.cassandra.ttypes import ConsistencyLevel,\
InvalidRequestException, NotFoundException, UnavailableException,\
TimedOutException
from pycassa.logging.pycassa_logger import *
|
3da2c4a83de97407c69b9144475441e9bb0a3073 | backdrop/write/config/development_tokens.py | backdrop/write/config/development_tokens.py | TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token'
}
| TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token'
}
| Add dev token for licensing_journey bucket | Add dev token for licensing_journey bucket
| Python | mit | alphagov/backdrop,alphagov/backdrop,alphagov/backdrop | TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token'
}
Add dev token for licensing_journey bucket | TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token'
}
| <commit_before>TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token'
}
<commit_msg>Add dev token for licensing_journey bucket<commit_after> | TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token'
}
| TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token'
}
Add dev token for licensing_journey bucketTOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token'
}
| <commit_before>TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token'
}
<commit_msg>Add dev token for licensing_journey bucket<commit_after>TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token'
}
|
966c8e549e1cb78c64ad2f359162bc5a2171a732 | fabfile.py | fabfile.py | from fabric.api import cd, env, local, lcd, run
# SSH address of the Vagrant box acting as puppet master.
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
    """Point Fabric at the Vagrant puppet-master box (password auth)."""
    master = 'vagrant@' + PUPPET_MASTER_IP + ':22'
    env.hosts = [master]
    env.passwords = {master: 'vagrant'}
def test():
    """Run the rspec suite of the nginx puppet module."""
    with lcd('puppet/modules'), lcd('nginx'):
        local('rspec')
def deploy():
    """Refresh the puppet checkout on the master and relink /etc/puppet."""
    puppet()
    test()
    # Fresh clone in the vagrant user's home directory.
    run('rm -rf puppet-untitled-2016')
    run('git clone https://github.com/zkan/puppet-untitled-2016.git')
    # Replace /etc/puppet's manifests and modules with symlinks into the
    # checkout — identical commands to the unrolled originals.
    for part in ('manifests', 'modules'):
        run('sudo rm -rf /etc/puppet/' + part)
        run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/'
            + part + ' /etc/puppet/' + part)
| from fabric.api import cd, env, local, lcd, run
# SSH address of the Vagrant box acting as puppet master.
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
    """Configure Fabric to target the puppet-master VM.

    Sets ``env.hosts`` and ``env.passwords`` for the vagrant user on
    PUPPET_MASTER_IP, using the default Vagrant password.
    """
    env.hosts = [
        'vagrant@' + PUPPET_MASTER_IP + ':22',
    ]
    env.passwords = {
        'vagrant@' + PUPPET_MASTER_IP + ':22': 'vagrant'
    }
def test():
    """Run rspec for the nginx module under puppet/modules."""
    with lcd('puppet/modules'):
        with lcd('nginx'):
            local('rspec')
def push():
    """Stage everything under puppet/ and push to origin's master branch."""
    with lcd('puppet'):
        for command in ('git add .', 'git push origin master'):
            local(command)
def deploy():
    """Re-clone the puppet repo on the master and relink /etc/puppet.

    NOTE(review): ``puppet()`` mutates ``env.hosts`` at task run time;
    Fabric only honours host lists chosen this late in some invocation
    styles — confirm the hosts are actually picked up.
    """
    puppet()
    test()
    # Fresh checkout in the vagrant user's home directory.
    run('rm -rf puppet-untitled-2016')
    run('git clone https://github.com/zkan/puppet-untitled-2016.git')
    # Point /etc/puppet at the checkout's manifests and modules.
    run('sudo rm -rf /etc/puppet/manifests')
    run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/manifests /etc/puppet/manifests')
    run('sudo rm -rf /etc/puppet/modules')
    run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/modules /etc/puppet/modules')
| Add push task (but not use it yet) | Add push task (but not use it yet)
| Python | mit | zkan/puppet-untitled-2016,zkan/puppet-untitled-2016 | from fabric.api import cd, env, local, lcd, run
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
env.hosts = [
'vagrant@' + PUPPET_MASTER_IP + ':22',
]
env.passwords = {
'vagrant@' + PUPPET_MASTER_IP + ':22': 'vagrant'
}
def test():
with lcd('puppet/modules'):
with lcd('nginx'):
local('rspec')
def deploy():
puppet()
test()
run('rm -rf puppet-untitled-2016')
run('git clone https://github.com/zkan/puppet-untitled-2016.git')
run('sudo rm -rf /etc/puppet/manifests')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/manifests /etc/puppet/manifests')
run('sudo rm -rf /etc/puppet/modules')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/modules /etc/puppet/modules')
Add push task (but not use it yet) | from fabric.api import cd, env, local, lcd, run
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
env.hosts = [
'vagrant@' + PUPPET_MASTER_IP + ':22',
]
env.passwords = {
'vagrant@' + PUPPET_MASTER_IP + ':22': 'vagrant'
}
def test():
with lcd('puppet/modules'):
with lcd('nginx'):
local('rspec')
def push():
with lcd('puppet'):
local('git add .')
local('git push origin master')
def deploy():
puppet()
test()
run('rm -rf puppet-untitled-2016')
run('git clone https://github.com/zkan/puppet-untitled-2016.git')
run('sudo rm -rf /etc/puppet/manifests')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/manifests /etc/puppet/manifests')
run('sudo rm -rf /etc/puppet/modules')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/modules /etc/puppet/modules')
| <commit_before>from fabric.api import cd, env, local, lcd, run
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
env.hosts = [
'vagrant@' + PUPPET_MASTER_IP + ':22',
]
env.passwords = {
'vagrant@' + PUPPET_MASTER_IP + ':22': 'vagrant'
}
def test():
with lcd('puppet/modules'):
with lcd('nginx'):
local('rspec')
def deploy():
puppet()
test()
run('rm -rf puppet-untitled-2016')
run('git clone https://github.com/zkan/puppet-untitled-2016.git')
run('sudo rm -rf /etc/puppet/manifests')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/manifests /etc/puppet/manifests')
run('sudo rm -rf /etc/puppet/modules')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/modules /etc/puppet/modules')
<commit_msg>Add push task (but not use it yet)<commit_after> | from fabric.api import cd, env, local, lcd, run
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
env.hosts = [
'vagrant@' + PUPPET_MASTER_IP + ':22',
]
env.passwords = {
'vagrant@' + PUPPET_MASTER_IP + ':22': 'vagrant'
}
def test():
with lcd('puppet/modules'):
with lcd('nginx'):
local('rspec')
def push():
with lcd('puppet'):
local('git add .')
local('git push origin master')
def deploy():
puppet()
test()
run('rm -rf puppet-untitled-2016')
run('git clone https://github.com/zkan/puppet-untitled-2016.git')
run('sudo rm -rf /etc/puppet/manifests')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/manifests /etc/puppet/manifests')
run('sudo rm -rf /etc/puppet/modules')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/modules /etc/puppet/modules')
| from fabric.api import cd, env, local, lcd, run
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
env.hosts = [
'vagrant@' + PUPPET_MASTER_IP + ':22',
]
env.passwords = {
'vagrant@' + PUPPET_MASTER_IP + ':22': 'vagrant'
}
def test():
with lcd('puppet/modules'):
with lcd('nginx'):
local('rspec')
def deploy():
puppet()
test()
run('rm -rf puppet-untitled-2016')
run('git clone https://github.com/zkan/puppet-untitled-2016.git')
run('sudo rm -rf /etc/puppet/manifests')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/manifests /etc/puppet/manifests')
run('sudo rm -rf /etc/puppet/modules')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/modules /etc/puppet/modules')
Add push task (but not use it yet)from fabric.api import cd, env, local, lcd, run
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
env.hosts = [
'vagrant@' + PUPPET_MASTER_IP + ':22',
]
env.passwords = {
'vagrant@' + PUPPET_MASTER_IP + ':22': 'vagrant'
}
def test():
with lcd('puppet/modules'):
with lcd('nginx'):
local('rspec')
def push():
with lcd('puppet'):
local('git add .')
local('git push origin master')
def deploy():
puppet()
test()
run('rm -rf puppet-untitled-2016')
run('git clone https://github.com/zkan/puppet-untitled-2016.git')
run('sudo rm -rf /etc/puppet/manifests')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/manifests /etc/puppet/manifests')
run('sudo rm -rf /etc/puppet/modules')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/modules /etc/puppet/modules')
| <commit_before>from fabric.api import cd, env, local, lcd, run
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
env.hosts = [
'vagrant@' + PUPPET_MASTER_IP + ':22',
]
env.passwords = {
'vagrant@' + PUPPET_MASTER_IP + ':22': 'vagrant'
}
def test():
with lcd('puppet/modules'):
with lcd('nginx'):
local('rspec')
def deploy():
puppet()
test()
run('rm -rf puppet-untitled-2016')
run('git clone https://github.com/zkan/puppet-untitled-2016.git')
run('sudo rm -rf /etc/puppet/manifests')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/manifests /etc/puppet/manifests')
run('sudo rm -rf /etc/puppet/modules')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/modules /etc/puppet/modules')
<commit_msg>Add push task (but not use it yet)<commit_after>from fabric.api import cd, env, local, lcd, run
PUPPET_MASTER_IP = '192.168.33.10'
def puppet():
env.hosts = [
'vagrant@' + PUPPET_MASTER_IP + ':22',
]
env.passwords = {
'vagrant@' + PUPPET_MASTER_IP + ':22': 'vagrant'
}
def test():
with lcd('puppet/modules'):
with lcd('nginx'):
local('rspec')
def push():
with lcd('puppet'):
local('git add .')
local('git push origin master')
def deploy():
puppet()
test()
run('rm -rf puppet-untitled-2016')
run('git clone https://github.com/zkan/puppet-untitled-2016.git')
run('sudo rm -rf /etc/puppet/manifests')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/manifests /etc/puppet/manifests')
run('sudo rm -rf /etc/puppet/modules')
run('sudo ln -sf /home/vagrant/puppet-untitled-2016/puppet/modules /etc/puppet/modules')
|
23540359422501ebd8a1b833c426cdfb1a3dfe00 | fabfile.py | fabfile.py | # -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
    """Copy sample PDFs from fs_doc/fixtures/ into uploads/.

    NOTE(review): despite the name, the copy direction is fixtures ->
    uploads, and the target directories must already exist (no mkdir).
    """
    local("cp fs_doc/fixtures/foreskrift/*.pdf uploads/foreskrift/")
    local("cp fs_doc/fixtures/bilaga/*.pdf uploads/bilaga/")
    local("cp fs_doc/fixtures/allmanna_rad/*.pdf uploads/allmanna_rad/")
def clean_db():
    """Recreate the demo database and load the example fixture.

    Single shell invocation with three steps: remove database/fst_demo.db,
    run ``syncdb --noinput``, then ``loaddata`` of exempeldata.json.
    """
    local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
    """Run the Django test suite via manage.py."""
    local("python manage.py test")
def clean_test():
    """Full clean run: refresh sample docs, rebuild the demo DB, test."""
    for step in (move_sampledocs_to_fixture, clean_db, test):
        step()
| # -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
    """Reset each uploads/<kind>/ directory and copy in the fixture PDFs.

    For every document kind this: creates uploads/<kind> if missing,
    deletes any stale PDFs there, then copies
    fs_doc/fixtures/<kind>/*.pdf into it.  (Despite the name, the copy
    direction is fixtures -> uploads.)

    Refactor: the twelve copy-pasted shell lines are folded into one
    loop that issues byte-identical commands in the identical order.
    """
    for kind in ('foreskrift', 'bilaga', 'allmanna_rad',
                 'konsoliderad_foreskrift'):
        local("mkdir -p uploads/%s" % kind)
        local("rm -f uploads/%s/*.pdf" % kind)
        local("cp fs_doc/fixtures/%s/*.pdf uploads/%s/" % (kind, kind))
def clean_db():
    """Rebuild the demo DB: delete database/fst_demo.db, ``syncdb
    --noinput``, then load fs_doc/fixtures/exempeldata.json — all in one
    shell command."""
    local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
    """Execute ``python manage.py test`` (the Django test runner)."""
    local("python manage.py test")
def clean_test():
    """Convenience task: refresh sample docs, reset the DB, run tests."""
    move_sampledocs_to_fixture()
    clean_db()
    test()
| Add new fixture to fabric script | Add new fixture to fabric script
| Python | bsd-3-clause | kamidev/autobuild_fst,kamidev/autobuild_fst,rinfo/fst,rinfo/fst,kamidev/autobuild_fst,rinfo/fst,kamidev/autobuild_fst,rinfo/fst | # -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
local("cp fs_doc/fixtures/foreskrift/*.pdf uploads/foreskrift/")
local("cp fs_doc/fixtures/bilaga/*.pdf uploads/bilaga/")
local("cp fs_doc/fixtures/allmanna_rad/*.pdf uploads/allmanna_rad/")
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
move_sampledocs_to_fixture()
clean_db()
test()
Add new fixture to fabric script | # -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
local("mkdir -p uploads/foreskrift")
local("rm -f uploads/foreskrift/*.pdf")
local("cp fs_doc/fixtures/foreskrift/*.pdf uploads/foreskrift/")
local("mkdir -p uploads/bilaga")
local("rm -f uploads/bilaga/*.pdf")
local("cp fs_doc/fixtures/bilaga/*.pdf uploads/bilaga/")
local("mkdir -p uploads/allmanna_rad")
local("rm -f uploads/allmanna_rad/*.pdf")
local("cp fs_doc/fixtures/allmanna_rad/*.pdf uploads/allmanna_rad/")
local("mkdir -p uploads/konsoliderad_foreskrift")
local("rm -f uploads/konsoliderad_foreskrift/*.pdf")
local("cp fs_doc/fixtures/konsoliderad_foreskrift/*.pdf uploads/konsoliderad_foreskrift/")
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
move_sampledocs_to_fixture()
clean_db()
test()
| <commit_before># -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
local("cp fs_doc/fixtures/foreskrift/*.pdf uploads/foreskrift/")
local("cp fs_doc/fixtures/bilaga/*.pdf uploads/bilaga/")
local("cp fs_doc/fixtures/allmanna_rad/*.pdf uploads/allmanna_rad/")
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
move_sampledocs_to_fixture()
clean_db()
test()
<commit_msg>Add new fixture to fabric script<commit_after> | # -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
local("mkdir -p uploads/foreskrift")
local("rm -f uploads/foreskrift/*.pdf")
local("cp fs_doc/fixtures/foreskrift/*.pdf uploads/foreskrift/")
local("mkdir -p uploads/bilaga")
local("rm -f uploads/bilaga/*.pdf")
local("cp fs_doc/fixtures/bilaga/*.pdf uploads/bilaga/")
local("mkdir -p uploads/allmanna_rad")
local("rm -f uploads/allmanna_rad/*.pdf")
local("cp fs_doc/fixtures/allmanna_rad/*.pdf uploads/allmanna_rad/")
local("mkdir -p uploads/konsoliderad_foreskrift")
local("rm -f uploads/konsoliderad_foreskrift/*.pdf")
local("cp fs_doc/fixtures/konsoliderad_foreskrift/*.pdf uploads/konsoliderad_foreskrift/")
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
move_sampledocs_to_fixture()
clean_db()
test()
| # -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
local("cp fs_doc/fixtures/foreskrift/*.pdf uploads/foreskrift/")
local("cp fs_doc/fixtures/bilaga/*.pdf uploads/bilaga/")
local("cp fs_doc/fixtures/allmanna_rad/*.pdf uploads/allmanna_rad/")
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
move_sampledocs_to_fixture()
clean_db()
test()
Add new fixture to fabric script# -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
local("mkdir -p uploads/foreskrift")
local("rm -f uploads/foreskrift/*.pdf")
local("cp fs_doc/fixtures/foreskrift/*.pdf uploads/foreskrift/")
local("mkdir -p uploads/bilaga")
local("rm -f uploads/bilaga/*.pdf")
local("cp fs_doc/fixtures/bilaga/*.pdf uploads/bilaga/")
local("mkdir -p uploads/allmanna_rad")
local("rm -f uploads/allmanna_rad/*.pdf")
local("cp fs_doc/fixtures/allmanna_rad/*.pdf uploads/allmanna_rad/")
local("mkdir -p uploads/konsoliderad_foreskrift")
local("rm -f uploads/konsoliderad_foreskrift/*.pdf")
local("cp fs_doc/fixtures/konsoliderad_foreskrift/*.pdf uploads/konsoliderad_foreskrift/")
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
move_sampledocs_to_fixture()
clean_db()
test()
| <commit_before># -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
local("cp fs_doc/fixtures/foreskrift/*.pdf uploads/foreskrift/")
local("cp fs_doc/fixtures/bilaga/*.pdf uploads/bilaga/")
local("cp fs_doc/fixtures/allmanna_rad/*.pdf uploads/allmanna_rad/")
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
move_sampledocs_to_fixture()
clean_db()
test()
<commit_msg>Add new fixture to fabric script<commit_after># -*- coding: UTF-8 -*-
from fabric.api import *
def move_sampledocs_to_fixture():
local("mkdir -p uploads/foreskrift")
local("rm -f uploads/foreskrift/*.pdf")
local("cp fs_doc/fixtures/foreskrift/*.pdf uploads/foreskrift/")
local("mkdir -p uploads/bilaga")
local("rm -f uploads/bilaga/*.pdf")
local("cp fs_doc/fixtures/bilaga/*.pdf uploads/bilaga/")
local("mkdir -p uploads/allmanna_rad")
local("rm -f uploads/allmanna_rad/*.pdf")
local("cp fs_doc/fixtures/allmanna_rad/*.pdf uploads/allmanna_rad/")
local("mkdir -p uploads/konsoliderad_foreskrift")
local("rm -f uploads/konsoliderad_foreskrift/*.pdf")
local("cp fs_doc/fixtures/konsoliderad_foreskrift/*.pdf uploads/konsoliderad_foreskrift/")
def clean_db():
local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json")
def test():
local("python manage.py test")
def clean_test():
move_sampledocs_to_fixture()
clean_db()
test()
|
612784b82792036ca538aa835f14e7102fd9e570 | kpi_dashboard/__manifest__.py | kpi_dashboard/__manifest__.py | # Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
| # Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
| Fix website URL in kpi_dashboard module | Fix website URL in kpi_dashboard module
| Python | agpl-3.0 | OCA/reporting-engine,OCA/reporting-engine,OCA/reporting-engine,OCA/reporting-engine | # Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
Fix website URL in kpi_dashboard module | # Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
| <commit_before># Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
<commit_msg>Fix website URL in kpi_dashboard module<commit_after> | # Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
| # Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
Fix website URL in kpi_dashboard module# Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
| <commit_before># Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
<commit_msg>Fix website URL in kpi_dashboard module<commit_after># Copyright 2020 Creu Blanca
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Kpi Dashboard",
"summary": """
Create Dashboards using kpis""",
"version": "12.0.1.2.0",
"license": "AGPL-3",
"author": "Creu Blanca,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/reporting-engine",
"depends": ["bus", "board", "base_sparse_field", "web_widget_color"],
"qweb": ["static/src/xml/dashboard.xml"],
"data": [
"wizards/kpi_dashboard_menu.xml",
"security/security.xml",
"security/ir.model.access.csv",
"templates/assets.xml",
"views/kpi_menu.xml",
"views/kpi_kpi.xml",
"views/kpi_dashboard.xml",
],
"demo": ["demo/demo_dashboard.xml"],
"maintainers": ["etobella"],
}
|
b13494292bc8cc42783db1e4500a525a0e457222 | dashboard_app/tests/__init__.py | dashboard_app/tests/__init__.py | """
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
| """
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.csrf',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
| Add reference to csrf tests in tests loader list | Add reference to csrf tests in tests loader list
| Python | agpl-3.0 | OSSystems/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server | """
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
Add reference to csrf tests in tests loader list | """
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.csrf',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
| <commit_before>"""
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
<commit_msg>Add reference to csrf tests in tests loader list<commit_after> | """
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.csrf',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
| """
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
Add reference to csrf tests in tests loader list"""
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.csrf',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
| <commit_before>"""
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
<commit_msg>Add reference to csrf tests in tests loader list<commit_after>"""
Package with all tests for dashboard_app
"""
import unittest
from testscenarios.scenarios import generate_scenarios
TEST_MODULES = [
'models.attachment',
'models.bundle',
'models.bundle_stream',
'models.hw_device',
'models.named_attribute',
'models.sw_package',
'models.test',
'models.test_case',
'models.test_result',
'models.test_run',
'other.csrf',
'other.deserialization',
'other.tests',
]
def suite():
loader = unittest.TestLoader()
test_suite = unittest.TestSuite()
for name in TEST_MODULES:
tests = loader.loadTestsFromName('dashboard_app.tests.' + name)
test_suite.addTests(generate_scenarios(tests))
return test_suite
|
a6049578c4dd4602aa903af262347dddf05df178 | template/module/tests/test_something.py | template/module/tests/test_something.py | # -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
| # -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?debug=assets&module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/?debug=assets",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
| Add debug assets to HTTP cases | [IMP] Add debug assets to HTTP cases
| Python | agpl-3.0 | Yajo/maintainer-tools,acsone/maintainer-tools,acsone/maintainer-tools,OCA/maintainer-tools,Yajo/maintainer-tools,Yajo/maintainer-tools,acsone/maintainers-tools,OCA/maintainer-tools,acsone/maintainers-tools,OCA/maintainer-tools,acsone/maintainer-tools,Yajo/maintainer-tools,OCA/maintainer-tools,acsone/maintainer-tools,acsone/maintainers-tools,gurneyalex/maintainers-tools | # -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
[IMP] Add debug assets to HTTP cases | # -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?debug=assets&module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/?debug=assets",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
| <commit_before># -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
<commit_msg>[IMP] Add debug assets to HTTP cases<commit_after> | # -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?debug=assets&module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/?debug=assets",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
| # -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
[IMP] Add debug assets to HTTP cases# -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?debug=assets&module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/?debug=assets",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
| <commit_before># -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
<commit_msg>[IMP] Add debug assets to HTTP cases<commit_after># -*- coding: utf-8 -*-
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js("/web/tests?debug=assets&module=module_name", "", login="admin")
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/?debug=assets",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
|
fa5f9ad96c52d5977913a4e8c41c0ec0bd2214f1 | coheoka/utils.py | coheoka/utils.py | # -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from nltk import sent_tokenize
from random import shuffle, sample
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
| # -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from random import shuffle, sample
from nltk import sent_tokenize
from scipy.stats import kendalltau as tau
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def tau_score_of_sentents(sent1_tokens, sent2_tokens):
assert len(sent1_tokens) == len(sent2_tokens)
t = tau(sent1_tokens, sent2_tokens)[0]
if t <= 0.33:
return -1
elif t > 0.33 and t <= 0.66:
return 0
else:
return 1
| Add util function to score sentences with kendall's tau | Add util function to score sentences with kendall's tau
| Python | apache-2.0 | kigawas/coheoka | # -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from nltk import sent_tokenize
from random import shuffle, sample
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
Add util function to score sentences with kendall's tau | # -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from random import shuffle, sample
from nltk import sent_tokenize
from scipy.stats import kendalltau as tau
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def tau_score_of_sentents(sent1_tokens, sent2_tokens):
assert len(sent1_tokens) == len(sent2_tokens)
t = tau(sent1_tokens, sent2_tokens)[0]
if t <= 0.33:
return -1
elif t > 0.33 and t <= 0.66:
return 0
else:
return 1
| <commit_before># -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from nltk import sent_tokenize
from random import shuffle, sample
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
<commit_msg>Add util function to score sentences with kendall's tau<commit_after> | # -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from random import shuffle, sample
from nltk import sent_tokenize
from scipy.stats import kendalltau as tau
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def tau_score_of_sentents(sent1_tokens, sent2_tokens):
assert len(sent1_tokens) == len(sent2_tokens)
t = tau(sent1_tokens, sent2_tokens)[0]
if t <= 0.33:
return -1
elif t > 0.33 and t <= 0.66:
return 0
else:
return 1
| # -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from nltk import sent_tokenize
from random import shuffle, sample
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
Add util function to score sentences with kendall's tau# -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from random import shuffle, sample
from nltk import sent_tokenize
from scipy.stats import kendalltau as tau
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def tau_score_of_sentents(sent1_tokens, sent2_tokens):
assert len(sent1_tokens) == len(sent2_tokens)
t = tau(sent1_tokens, sent2_tokens)[0]
if t <= 0.33:
return -1
elif t > 0.33 and t <= 0.66:
return 0
else:
return 1
| <commit_before># -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from nltk import sent_tokenize
from random import shuffle, sample
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
<commit_msg>Add util function to score sentences with kendall's tau<commit_after># -*- coding: utf-8 -*-
'''
Preprocessing utilities
'''
from random import shuffle, sample
from nltk import sent_tokenize
from scipy.stats import kendalltau as tau
def shuffle_sents(text, times):
sents = sent_tokenize(text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def remove_sents(text, times, remove_number=1):
sents = sent_tokenize(text)
res = []
for i in range(times):
res.append(' '.join(sample(sents, len(sents) - remove_number)))
return res
def add_sents(text, times, added_text, add_number=1):
sents = sent_tokenize(text)
sents.append(added_text)
res = []
for i in range(times):
shuffle(sents)
res.append(' '.join(sents))
return res
def tau_score_of_sentents(sent1_tokens, sent2_tokens):
assert len(sent1_tokens) == len(sent2_tokens)
t = tau(sent1_tokens, sent2_tokens)[0]
if t <= 0.33:
return -1
elif t > 0.33 and t <= 0.66:
return 0
else:
return 1
|
c977bef31fd36356f3a131d1f25250640c61f4b7 | dojango/__init__.py | dojango/__init__.py | VERSION = (0, 5, 5, 'final', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
| VERSION = (0, 5, 6, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
| Mark dojango as 0.5.6 alpha | Mark dojango as 0.5.6 alpha
| Python | bsd-3-clause | ofirr/dojango,ricard33/dojango,ofirr/dojango,ricard33/dojango,ofirr/dojango,william-gr/dojango,william-gr/dojango,ricard33/dojango,klipstein/dojango,william-gr/dojango,klipstein/dojango | VERSION = (0, 5, 5, 'final', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
Mark dojango as 0.5.6 alpha | VERSION = (0, 5, 6, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
| <commit_before>VERSION = (0, 5, 5, 'final', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
<commit_msg>Mark dojango as 0.5.6 alpha<commit_after> | VERSION = (0, 5, 6, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
| VERSION = (0, 5, 5, 'final', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
Mark dojango as 0.5.6 alphaVERSION = (0, 5, 6, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
| <commit_before>VERSION = (0, 5, 5, 'final', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
<commit_msg>Mark dojango as 0.5.6 alpha<commit_after>VERSION = (0, 5, 6, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
#from django.utils.version import get_svn_revision
#svn_rev = get_svn_revision()
#if svn_rev != u'SVN-unknown':
# version = "%s %s" % (version, svn_rev)
return version
|
7c8a90a6bc0a51788966b0035bc97b24a6680611 | populous/cli.py | populous/cli.py | import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
| import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError, BackendError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.group()
def run():
pass
@run.command()
@click.option('--host', default='localhost', help="Database host address")
@click.option('--port', default=5432, type=int, help="Database host port")
@click.option('--db', help="Database name")
@click.option('--user', help="Postgresql user name used to authenticate")
@click.option('--password', help="Postgresql password used to authenticate")
@click.argument('files', nargs=-1, required=True)
def postgresql(host, port, db, user, password, files):
blueprint = get_blueprint(*files)
try:
from populous.backends.postgres import Postgres
backend = Postgres(database=db, user=user, password=password,
host=host, port=port)
try:
backend.generate(blueprint)
finally:
backend.close()
except BackendError as e:
raise click.ClickException(e.message)
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
| Add a CLI command for Postgres backend | Add a CLI command for Postgres backend
| Python | mit | novafloss/populous | import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
Add a CLI command for Postgres backend | import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError, BackendError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.group()
def run():
pass
@run.command()
@click.option('--host', default='localhost', help="Database host address")
@click.option('--port', default=5432, type=int, help="Database host port")
@click.option('--db', help="Database name")
@click.option('--user', help="Postgresql user name used to authenticate")
@click.option('--password', help="Postgresql password used to authenticate")
@click.argument('files', nargs=-1, required=True)
def postgresql(host, port, db, user, password, files):
blueprint = get_blueprint(*files)
try:
from populous.backends.postgres import Postgres
backend = Postgres(database=db, user=user, password=password,
host=host, port=port)
try:
backend.generate(blueprint)
finally:
backend.close()
except BackendError as e:
raise click.ClickException(e.message)
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
| <commit_before>import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
<commit_msg>Add a CLI command for Postgres backend<commit_after> | import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError, BackendError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.group()
def run():
pass
@run.command()
@click.option('--host', default='localhost', help="Database host address")
@click.option('--port', default=5432, type=int, help="Database host port")
@click.option('--db', help="Database name")
@click.option('--user', help="Postgresql user name used to authenticate")
@click.option('--password', help="Postgresql password used to authenticate")
@click.argument('files', nargs=-1, required=True)
def postgresql(host, port, db, user, password, files):
blueprint = get_blueprint(*files)
try:
from populous.backends.postgres import Postgres
backend = Postgres(database=db, user=user, password=password,
host=host, port=port)
try:
backend.generate(blueprint)
finally:
backend.close()
except BackendError as e:
raise click.ClickException(e.message)
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
| import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
Add a CLI command for Postgres backendimport click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError, BackendError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.group()
def run():
pass
@run.command()
@click.option('--host', default='localhost', help="Database host address")
@click.option('--port', default=5432, type=int, help="Database host port")
@click.option('--db', help="Database name")
@click.option('--user', help="Postgresql user name used to authenticate")
@click.option('--password', help="Postgresql password used to authenticate")
@click.argument('files', nargs=-1, required=True)
def postgresql(host, port, db, user, password, files):
blueprint = get_blueprint(*files)
try:
from populous.backends.postgres import Postgres
backend = Postgres(database=db, user=user, password=password,
host=host, port=port)
try:
backend.generate(blueprint)
finally:
backend.close()
except BackendError as e:
raise click.ClickException(e.message)
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
| <commit_before>import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
<commit_msg>Add a CLI command for Postgres backend<commit_after>import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError, BackendError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.group()
def run():
pass
@run.command()
@click.option('--host', default='localhost', help="Database host address")
@click.option('--port', default=5432, type=int, help="Database host port")
@click.option('--db', help="Database name")
@click.option('--user', help="Postgresql user name used to authenticate")
@click.option('--password', help="Postgresql password used to authenticate")
@click.argument('files', nargs=-1, required=True)
def postgresql(host, port, db, user, password, files):
blueprint = get_blueprint(*files)
try:
from populous.backends.postgres import Postgres
backend = Postgres(database=db, user=user, password=password,
host=host, port=port)
try:
backend.generate(blueprint)
finally:
backend.close()
except BackendError as e:
raise click.ClickException(e.message)
@cli.command()
@click.argument('files', nargs=-1, required=True)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
|
c25e735216dd1969ed09d24fcca9eaafe1dc8405 | Lib/__init__.py | Lib/__init__.py | """\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
__doc__ += pkgload.get_pkgdocs()
| """\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
import numpy as _num
from numpy import *
__all__ += _num.__all__
del _num
__doc__ += pkgload.get_pkgdocs()
| Put numpy namespace in scipy for backward compatibility... | Put numpy namespace in scipy for backward compatibility...
| Python | bsd-3-clause | Newman101/scipy,kleskjr/scipy,mikebenfield/scipy,njwilson23/scipy,giorgiop/scipy,mikebenfield/scipy,Stefan-Endres/scipy,trankmichael/scipy,argriffing/scipy,ilayn/scipy,anntzer/scipy,rmcgibbo/scipy,ortylp/scipy,zerothi/scipy,raoulbq/scipy,mtrbean/scipy,sargas/scipy,newemailjdm/scipy,felipebetancur/scipy,ales-erjavec/scipy,woodscn/scipy,vanpact/scipy,endolith/scipy,efiring/scipy,maciejkula/scipy,sonnyhu/scipy,mhogg/scipy,mortonjt/scipy,rgommers/scipy,rmcgibbo/scipy,perimosocordiae/scipy,anntzer/scipy,matthewalbani/scipy,piyush0609/scipy,zaxliu/scipy,jsilter/scipy,vberaudi/scipy,zaxliu/scipy,juliantaylor/scipy,niknow/scipy,haudren/scipy,nmayorov/scipy,Kamp9/scipy,vberaudi/scipy,zerothi/scipy,larsmans/scipy,jor-/scipy,Gillu13/scipy,gfyoung/scipy,larsmans/scipy,ales-erjavec/scipy,chatcannon/scipy,trankmichael/scipy,nvoron23/scipy,argriffing/scipy,vigna/scipy,maniteja123/scipy,teoliphant/scipy,mgaitan/scipy,Eric89GXL/scipy,bkendzior/scipy,mdhaber/scipy,pbrod/scipy,gef756/scipy,rgommers/scipy,ilayn/scipy,person142/scipy,apbard/scipy,kalvdans/scipy,petebachant/scipy,jakevdp/scipy,pbrod/scipy,endolith/scipy,josephcslater/scipy,mortada/scipy,matthew-brett/scipy,anielsen001/scipy,josephcslater/scipy,nmayorov/scipy,jseabold/scipy,rgommers/scipy,ChanderG/scipy,Dapid/scipy,sonnyhu/scipy,chatcannon/scipy,Kamp9/scipy,ChanderG/scipy,endolith/scipy,scipy/scipy,giorgiop/scipy,mhogg/scipy,bkendzior/scipy,sauliusl/scipy,Stefan-Endres/scipy,mortonjt/scipy,Shaswat27/scipy,vhaasteren/scipy,felipebetancur/scipy,anntzer/scipy,dch312/scipy,fredrikw/scipy,e-q/scipy,FRidh/scipy,sauliusl/scipy,Eric89GXL/scipy,maniteja123/scipy,rmcgibbo/scipy,pschella/scipy,zerothi/scipy,ChanderG/scipy,person142/scipy,pyramania/scipy,ales-erjavec/scipy,apbard/scipy,nmayorov/scipy,mtrbean/scipy,Srisai85/scipy,Gillu13/scipy,hainm/scipy,fernand/scipy,fredrikw/scipy,nonhermitian/scipy,WillieMaddox/scipy,fernand/scipy,perimosocordiae/scipy,juliantaylor/scipy,lukauskas/scipy,gdooper/scipy,surh
udm/scipy,tylerjereddy/scipy,vberaudi/scipy,zaxliu/scipy,sriki18/scipy,mortada/scipy,felipebetancur/scipy,ortylp/scipy,anielsen001/scipy,richardotis/scipy,Gillu13/scipy,fredrikw/scipy,scipy/scipy,lukauskas/scipy,gdooper/scipy,petebachant/scipy,niknow/scipy,gertingold/scipy,behzadnouri/scipy,anielsen001/scipy,befelix/scipy,WillieMaddox/scipy,tylerjereddy/scipy,aman-iitj/scipy,woodscn/scipy,newemailjdm/scipy,ilayn/scipy,argriffing/scipy,minhlongdo/scipy,fredrikw/scipy,anielsen001/scipy,gfyoung/scipy,vigna/scipy,e-q/scipy,zaxliu/scipy,jakevdp/scipy,jonycgn/scipy,mtrbean/scipy,jseabold/scipy,andyfaff/scipy,fernand/scipy,WarrenWeckesser/scipy,Shaswat27/scipy,maniteja123/scipy,jakevdp/scipy,Srisai85/scipy,njwilson23/scipy,cpaulik/scipy,dch312/scipy,woodscn/scipy,pyramania/scipy,mgaitan/scipy,fernand/scipy,sriki18/scipy,pizzathief/scipy,mikebenfield/scipy,mingwpy/scipy,hainm/scipy,Gillu13/scipy,Shaswat27/scipy,scipy/scipy,mortada/scipy,ndchorley/scipy,mingwpy/scipy,bkendzior/scipy,mortonjt/scipy,scipy/scipy,efiring/scipy,pschella/scipy,nonhermitian/scipy,vhaasteren/scipy,gertingold/scipy,sauliusl/scipy,anntzer/scipy,behzadnouri/scipy,endolith/scipy,mortonjt/scipy,zxsted/scipy,aeklant/scipy,perimosocordiae/scipy,jsilter/scipy,Newman101/scipy,fernand/scipy,fernand/scipy,andyfaff/scipy,grlee77/scipy,piyush0609/scipy,ortylp/scipy,WarrenWeckesser/scipy,mdhaber/scipy,arokem/scipy,Srisai85/scipy,raoulbq/scipy,chatcannon/scipy,fredrikw/scipy,lukauskas/scipy,e-q/scipy,kalvdans/scipy,apbard/scipy,mingwpy/scipy,ogrisel/scipy,pyramania/scipy,mgaitan/scipy,matthew-brett/scipy,sauliusl/scipy,witcxc/scipy,jamestwebber/scipy,mtrbean/scipy,aman-iitj/scipy,minhlongdo/scipy,maciejkula/scipy,surhudm/scipy,richardotis/scipy,futurulus/scipy,Srisai85/scipy,tylerjereddy/scipy,minhlongdo/scipy,vigna/scipy,perimosocordiae/scipy,Dapid/scipy,aeklant/scipy,minhlongdo/scipy,aeklant/scipy,nvoron23/scipy,surhudm/scipy,behzadnouri/scipy,jor-/scipy,sauliusl/scipy,fredrikw/scipy,gdooper/scipy,ndchorley/scip
y,WarrenWeckesser/scipy,gertingold/scipy,felipebetancur/scipy,futurulus/scipy,petebachant/scipy,scipy/scipy,rmcgibbo/scipy,pbrod/scipy,futurulus/scipy,josephcslater/scipy,ogrisel/scipy,Eric89GXL/scipy,mortonjt/scipy,newemailjdm/scipy,lhilt/scipy,zaxliu/scipy,mingwpy/scipy,futurulus/scipy,befelix/scipy,richardotis/scipy,e-q/scipy,dominicelse/scipy,befelix/scipy,maniteja123/scipy,raoulbq/scipy,niknow/scipy,rmcgibbo/scipy,newemailjdm/scipy,WillieMaddox/scipy,Newman101/scipy,FRidh/scipy,pnedunuri/scipy,lhilt/scipy,ilayn/scipy,Shaswat27/scipy,Stefan-Endres/scipy,jonycgn/scipy,juliantaylor/scipy,zaxliu/scipy,mdhaber/scipy,sonnyhu/scipy,Srisai85/scipy,grlee77/scipy,Eric89GXL/scipy,cpaulik/scipy,nvoron23/scipy,jonycgn/scipy,jonycgn/scipy,andim/scipy,njwilson23/scipy,nmayorov/scipy,richardotis/scipy,Stefan-Endres/scipy,Shaswat27/scipy,zxsted/scipy,niknow/scipy,mhogg/scipy,pschella/scipy,Srisai85/scipy,pnedunuri/scipy,sriki18/scipy,mortada/scipy,kleskjr/scipy,ortylp/scipy,cpaulik/scipy,mingwpy/scipy,trankmichael/scipy,Stefan-Endres/scipy,futurulus/scipy,chatcannon/scipy,mingwpy/scipy,befelix/scipy,sargas/scipy,raoulbq/scipy,haudren/scipy,matthewalbani/scipy,gef756/scipy,cpaulik/scipy,anntzer/scipy,nonhermitian/scipy,jseabold/scipy,kleskjr/scipy,sonnyhu/scipy,zerothi/scipy,lukauskas/scipy,befelix/scipy,larsmans/scipy,sargas/scipy,pbrod/scipy,jamestwebber/scipy,andim/scipy,nvoron23/scipy,apbard/scipy,teoliphant/scipy,chatcannon/scipy,pnedunuri/scipy,vigna/scipy,haudren/scipy,andim/scipy,efiring/scipy,WarrenWeckesser/scipy,dominicelse/scipy,arokem/scipy,petebachant/scipy,mtrbean/scipy,njwilson23/scipy,maciejkula/scipy,josephcslater/scipy,jor-/scipy,jjhelmus/scipy,perimosocordiae/scipy,dominicelse/scipy,zxsted/scipy,jonycgn/scipy,andyfaff/scipy,Eric89GXL/scipy,bkendzior/scipy,pbrod/scipy,efiring/scipy,jamestwebber/scipy,vhaasteren/scipy,cpaulik/scipy,grlee77/scipy,zxsted/scipy,trankmichael/scipy,WarrenWeckesser/scipy,andyfaff/scipy,mdhaber/scipy,raoulbq/scipy,aarchiba/scipy,gef75
6/scipy,gertingold/scipy,person142/scipy,mikebenfield/scipy,Stefan-Endres/scipy,jjhelmus/scipy,tylerjereddy/scipy,argriffing/scipy,minhlongdo/scipy,ndchorley/scipy,anielsen001/scipy,juliantaylor/scipy,witcxc/scipy,chatcannon/scipy,ales-erjavec/scipy,niknow/scipy,WillieMaddox/scipy,richardotis/scipy,ogrisel/scipy,FRidh/scipy,witcxc/scipy,mdhaber/scipy,aarchiba/scipy,woodscn/scipy,gdooper/scipy,newemailjdm/scipy,vhaasteren/scipy,teoliphant/scipy,nmayorov/scipy,vberaudi/scipy,jonycgn/scipy,jor-/scipy,dominicelse/scipy,argriffing/scipy,njwilson23/scipy,lhilt/scipy,vigna/scipy,pbrod/scipy,ilayn/scipy,maniteja123/scipy,efiring/scipy,hainm/scipy,Dapid/scipy,Dapid/scipy,vberaudi/scipy,efiring/scipy,nonhermitian/scipy,jsilter/scipy,josephcslater/scipy,petebachant/scipy,mdhaber/scipy,dch312/scipy,vanpact/scipy,vberaudi/scipy,FRidh/scipy,giorgiop/scipy,larsmans/scipy,pnedunuri/scipy,haudren/scipy,aeklant/scipy,hainm/scipy,matthew-brett/scipy,gertingold/scipy,mortada/scipy,giorgiop/scipy,bkendzior/scipy,vhaasteren/scipy,maciejkula/scipy,sonnyhu/scipy,trankmichael/scipy,gfyoung/scipy,apbard/scipy,ChanderG/scipy,lukauskas/scipy,behzadnouri/scipy,teoliphant/scipy,jsilter/scipy,matthew-brett/scipy,sauliusl/scipy,jamestwebber/scipy,jor-/scipy,jsilter/scipy,gef756/scipy,gef756/scipy,hainm/scipy,jjhelmus/scipy,aman-iitj/scipy,Kamp9/scipy,kalvdans/scipy,ndchorley/scipy,jakevdp/scipy,andyfaff/scipy,andim/scipy,newemailjdm/scipy,mgaitan/scipy,futurulus/scipy,witcxc/scipy,surhudm/scipy,ndchorley/scipy,mortonjt/scipy,Kamp9/scipy,sriki18/scipy,Newman101/scipy,Dapid/scipy,arokem/scipy,sargas/scipy,njwilson23/scipy,maciejkula/scipy,matthewalbani/scipy,pyramania/scipy,aeklant/scipy,zxsted/scipy,hainm/scipy,surhudm/scipy,gef756/scipy,ogrisel/scipy,haudren/scipy,vanpact/scipy,matthewalbani/scipy,arokem/scipy,mtrbean/scipy,giorgiop/scipy,piyush0609/scipy,pnedunuri/scipy,cpaulik/scipy,felipebetancur/scipy,Newman101/scipy,kalvdans/scipy,zerothi/scipy,dominicelse/scipy,sriki18/scipy,jjhelmus/scipy,a
ndim/scipy,ndchorley/scipy,mgaitan/scipy,mhogg/scipy,behzadnouri/scipy,behzadnouri/scipy,kleskjr/scipy,mhogg/scipy,trankmichael/scipy,witcxc/scipy,jakevdp/scipy,rgommers/scipy,Eric89GXL/scipy,pschella/scipy,FRidh/scipy,scipy/scipy,aman-iitj/scipy,pnedunuri/scipy,rmcgibbo/scipy,jamestwebber/scipy,mikebenfield/scipy,Gillu13/scipy,kalvdans/scipy,gdooper/scipy,larsmans/scipy,perimosocordiae/scipy,jjhelmus/scipy,gfyoung/scipy,nonhermitian/scipy,Kamp9/scipy,nvoron23/scipy,pizzathief/scipy,vanpact/scipy,endolith/scipy,grlee77/scipy,haudren/scipy,jseabold/scipy,zxsted/scipy,raoulbq/scipy,lukauskas/scipy,piyush0609/scipy,sargas/scipy,sonnyhu/scipy,kleskjr/scipy,zerothi/scipy,ChanderG/scipy,lhilt/scipy,vanpact/scipy,Newman101/scipy,person142/scipy,grlee77/scipy,jseabold/scipy,jseabold/scipy,aman-iitj/scipy,WarrenWeckesser/scipy,ortylp/scipy,ilayn/scipy,surhudm/scipy,matthew-brett/scipy,dch312/scipy,lhilt/scipy,dch312/scipy,aarchiba/scipy,WillieMaddox/scipy,e-q/scipy,juliantaylor/scipy,maniteja123/scipy,matthewalbani/scipy,minhlongdo/scipy,person142/scipy,anntzer/scipy,kleskjr/scipy,vhaasteren/scipy,rgommers/scipy,piyush0609/scipy,andim/scipy,pizzathief/scipy,anielsen001/scipy,felipebetancur/scipy,aarchiba/scipy,teoliphant/scipy,ogrisel/scipy,arokem/scipy,woodscn/scipy,ales-erjavec/scipy,ales-erjavec/scipy,aman-iitj/scipy,petebachant/scipy,tylerjereddy/scipy,pyramania/scipy,niknow/scipy,vanpact/scipy,Dapid/scipy,nvoron23/scipy,andyfaff/scipy,Shaswat27/scipy,larsmans/scipy,endolith/scipy,woodscn/scipy,pizzathief/scipy,richardotis/scipy,FRidh/scipy,pschella/scipy,ChanderG/scipy,pizzathief/scipy,mortada/scipy,Kamp9/scipy,ortylp/scipy,WillieMaddox/scipy,sriki18/scipy,argriffing/scipy,mhogg/scipy,piyush0609/scipy,mgaitan/scipy,gfyoung/scipy,aarchiba/scipy,Gillu13/scipy,giorgiop/scipy | """\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
__doc__ += pkgload.get_pkgdocs()
Put numpy namespace in scipy for backward compatibility... | """\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
import numpy as _num
from numpy import *
__all__ += _num.__all__
del _num
__doc__ += pkgload.get_pkgdocs()
| <commit_before>"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
__doc__ += pkgload.get_pkgdocs()
<commit_msg>Put numpy namespace in scipy for backward compatibility...<commit_after> | """\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
import numpy as _num
from numpy import *
__all__ += _num.__all__
del _num
__doc__ += pkgload.get_pkgdocs()
| """\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
__doc__ += pkgload.get_pkgdocs()
Put numpy namespace in scipy for backward compatibility..."""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
import numpy as _num
from numpy import *
__all__ += _num.__all__
del _num
__doc__ += pkgload.get_pkgdocs()
| <commit_before>"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
__doc__ += pkgload.get_pkgdocs()
<commit_msg>Put numpy namespace in scipy for backward compatibility...<commit_after>"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
try:
import pkg_resources as _pr # activate namespace packages (manipulates __path__)
del _pr
except ImportError:
pass
from numpy import show_config as show_numpy_config
if show_numpy_config is None:
raise ImportError,"Cannot import scipy when running from numpy source directory."
from numpy import __version__ as __numpy_version__
from __config__ import show as show_config
from version import version as __version__
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
import os as _os
SCIPY_IMPORT_VERBOSE = int(_os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
del _os
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
import numpy as _num
from numpy import *
__all__ += _num.__all__
del _num
__doc__ += pkgload.get_pkgdocs()
|
e827c54c7e3727c2554d5b0c74a085fec7348d0c | tests/test_utils.py | tests/test_utils.py | """
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEquals(status, test_value, msg)
| """
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEqual(status, test_value, msg)
| FIX deprecation warning for using assertEquals | FIX deprecation warning for using assertEquals
| Python | mit | AtteqCom/zsl,AtteqCom/zsl | """
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEquals(status, test_value, msg)
FIX deprecation warning for using assertEquals | """
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEqual(status, test_value, msg)
| <commit_before>"""
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEquals(status, test_value, msg)
<commit_msg>FIX deprecation warning for using assertEquals<commit_after> | """
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEqual(status, test_value, msg)
| """
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEquals(status, test_value, msg)
FIX deprecation warning for using assertEquals"""
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEqual(status, test_value, msg)
| <commit_before>"""
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEquals(status, test_value, msg)
<commit_msg>FIX deprecation warning for using assertEquals<commit_after>"""
Test utilities.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import json
from unittest import TestCase
def parent_module(module_name):
# type: (AnyStr) -> AnyStr
"""Return the parent module name for a module.
:param module_name: module nam
:type module_name: str
:return: module's parent name
:rtype: str
>>> parent_module('zsl.application.module')
'zsl.application'
"""
return '.'.join(module_name.split('.')[:-1])
def json_loads(str_):
# type: (AnyStr) -> Dict[str, str]
"""Parse json from flask response which could be in bytes in Py3."""
if isinstance(str_, bytes):
str_ = str_.decode()
return json.loads(str_)
class HttpTestCase(TestCase):
"""Extends TestCase with methods for easier testing of HTTP requests."""
def assertHTTPStatus(self, status, test_value, msg):
# type: (Union[int, HTTPStatus], int, AnyStr) -> None
"""Assert http status
:param status: http status
:param test_value: flask respond status
:param msg: test message
"""
if hasattr(status, 'value'): # py2/3
status = status.value
self.assertEqual(status, test_value, msg)
|
53b8c8efcef4c419c06197365448cc271a5f6aef | Lib/test/test_pep3120.py | Lib/test/test_pep3120.py | # This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
self.assertTrue(str(msg).find("Non-UTF-8 code starting with") >= 0)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
| # This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
msg = str(msg)
self.assertTrue('UTF-8' in msg or 'utf8' in msg)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
| Make the exception message check for malformed UTF-8 source looser so that SyntaxError triggered from UnicodeDecodeError is also acceptable. | Make the exception message check for malformed UTF-8 source looser so that SyntaxError triggered from UnicodeDecodeError is also acceptable.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | # This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
self.assertTrue(str(msg).find("Non-UTF-8 code starting with") >= 0)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
Make the exception message check for malformed UTF-8 source looser so that SyntaxError triggered from UnicodeDecodeError is also acceptable. | # This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
msg = str(msg)
self.assertTrue('UTF-8' in msg or 'utf8' in msg)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
| <commit_before># This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
self.assertTrue(str(msg).find("Non-UTF-8 code starting with") >= 0)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
<commit_msg>Make the exception message check for malformed UTF-8 source looser so that SyntaxError triggered from UnicodeDecodeError is also acceptable.<commit_after> | # This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
msg = str(msg)
self.assertTrue('UTF-8' in msg or 'utf8' in msg)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
| # This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
self.assertTrue(str(msg).find("Non-UTF-8 code starting with") >= 0)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
Make the exception message check for malformed UTF-8 source looser so that SyntaxError triggered from UnicodeDecodeError is also acceptable.# This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
msg = str(msg)
self.assertTrue('UTF-8' in msg or 'utf8' in msg)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
| <commit_before># This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
self.assertTrue(str(msg).find("Non-UTF-8 code starting with") >= 0)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
<commit_msg>Make the exception message check for malformed UTF-8 source looser so that SyntaxError triggered from UnicodeDecodeError is also acceptable.<commit_after># This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
msg = str(msg)
self.assertTrue('UTF-8' in msg or 'utf8' in msg)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
# Issue 3574.
def test_latin1(self):
# Allow compile() to read Latin-1 source.
source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
try:
code = compile(source_code, '<dummy>', 'exec')
except SyntaxError:
self.fail("compile() cannot handle Latin-1 source")
ns = {}
exec(code, ns)
self.assertEqual('Ç', ns['u'])
def test_main():
support.run_unittest(PEP3120Test, BuiltinCompileTests)
if __name__=="__main__":
test_main()
|
6763ebff720c8aa6acbfddccd4a8d3f7aeba2cee | roman.py | roman.py | #!/usr/bin/env python
def parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=parser(roman))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
def Parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=Parser(roman))
if __name__ == '__main__':
main()
| Change function name to Camel | Change function name to Camel
| Python | cc0-1.0 | Elixeus/Snippets,Elixeus/Snippets | #!/usr/bin/env python
def parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=parser(roman))
if __name__ == '__main__':
main()
Change function name to Camel | #!/usr/bin/env python
def Parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=Parser(roman))
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
def parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=parser(roman))
if __name__ == '__main__':
main()
<commit_msg>Change function name to Camel<commit_after> | #!/usr/bin/env python
def Parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=Parser(roman))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
def parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=parser(roman))
if __name__ == '__main__':
main()
Change function name to Camel#!/usr/bin/env python
def Parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=Parser(roman))
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
def parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=parser(roman))
if __name__ == '__main__':
main()
<commit_msg>Change function name to Camel<commit_after>#!/usr/bin/env python
def Parser(roman):
'''
This function receives a Roman Numeral String and convert it to an
Arabic Number.
parameters:
---------------------------------
roman: Roman Numearl string input'''
roman_dic = {'M': 1000, 'C': 100, 'L': 50, 'D': 500,
'X': 10, 'V': 5, 'I': 1}
if roman:
if not all(numeral in roman_dic for numeral in roman):
print 'Illegal letter(s).'
else:
num_ls = [roman_dic.get(numeral, None) for numeral in roman]
prev = num_ls[0]
total = 0
for val in num_ls:
if val <= prev:
total += val
prev = val
else:
total = total + val - (2 * prev)
prev = val
return total
else:
print 'Empty String!'
def main():
roman = raw_input('Input the Roman Numeral:\n')
print '{roman} is {arabic}.'.format(roman=roman,
arabic=Parser(roman))
if __name__ == '__main__':
main()
|
e3f6be6f2ce00e335ebc4d17ff6b89f230dc34fa | rotor.py | rotor.py | """
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
root.add_leaf(blade)
self.roots.append(root)
self.blades.append(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
| """
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody, Hinge
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe, pitch=False):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
self.roots.append(root)
self.blades.append(blade)
if pitch:
# Add bearing about blade X axis
bearing = Hinge('pitch%d' % (ib+1), [1, 0, 0])
root.add_leaf(bearing)
bearing.add_leaf(blade)
else:
root.add_leaf(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
| Test step change in pitch in aeroelastic simulation | Test step change in pitch in aeroelastic simulation
| Python | mit | ricklupton/py-bem | """
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
root.add_leaf(blade)
self.roots.append(root)
self.blades.append(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
Test step change in pitch in aeroelastic simulation | """
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody, Hinge
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe, pitch=False):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
self.roots.append(root)
self.blades.append(blade)
if pitch:
# Add bearing about blade X axis
bearing = Hinge('pitch%d' % (ib+1), [1, 0, 0])
root.add_leaf(bearing)
bearing.add_leaf(blade)
else:
root.add_leaf(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
| <commit_before>"""
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
root.add_leaf(blade)
self.roots.append(root)
self.blades.append(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
<commit_msg>Test step change in pitch in aeroelastic simulation<commit_after> | """
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody, Hinge
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe, pitch=False):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
self.roots.append(root)
self.blades.append(blade)
if pitch:
# Add bearing about blade X axis
bearing = Hinge('pitch%d' % (ib+1), [1, 0, 0])
root.add_leaf(bearing)
bearing.add_leaf(blade)
else:
root.add_leaf(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
| """
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
root.add_leaf(blade)
self.roots.append(root)
self.blades.append(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
Test step change in pitch in aeroelastic simulation"""
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody, Hinge
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe, pitch=False):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
self.roots.append(root)
self.blades.append(blade)
if pitch:
# Add bearing about blade X axis
bearing = Hinge('pitch%d' % (ib+1), [1, 0, 0])
root.add_leaf(bearing)
bearing.add_leaf(blade)
else:
root.add_leaf(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
| <commit_before>"""
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
root.add_leaf(blade)
self.roots.append(root)
self.blades.append(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
<commit_msg>Test step change in pitch in aeroelastic simulation<commit_after>"""
Rotor model using mbwind
"""
from numpy import pi, dot
from mbwind import rotations, RigidConnection, RigidBody, Hinge
from mbwind.elements.modal import ModalElementFromFE
class Rotor(object):
def __init__(self, num_blades, root_length, blade_fe, pitch=False):
self.num_blades = num_blades
self.root_length = root_length
self.blade_fe = blade_fe
# Build the elements
self.roots = []
self.blades = []
for ib in range(num_blades):
R = rotations(('y', -pi/2), ('x', ib*2*pi/3))
root_offset = dot(R, [root_length, 0, 0])
root = RigidConnection('root%d' % (ib+1), root_offset, R)
blade = ModalElementFromFE('blade%d' % (ib+1), blade_fe)
self.roots.append(root)
self.blades.append(blade)
if pitch:
# Add bearing about blade X axis
bearing = Hinge('pitch%d' % (ib+1), [1, 0, 0])
root.add_leaf(bearing)
bearing.add_leaf(blade)
else:
root.add_leaf(blade)
@property
def mass(self):
return self.num_blades * self.blade_fe.fe.mass
def connect_to(self, parent):
for root in self.roots:
parent.add_leaf(root)
|
9ddbc2c319993a1084317f9af8796f25211c6d33 | sample-client.py | sample-client.py | import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://localhost:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
| import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://firehose.elite-market-data.net:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
| Use official URL for sample client. | Use official URL for sample client.
| Python | bsd-2-clause | andreas23/emdn | import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://localhost:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
Use official URL for sample client. | import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://firehose.elite-market-data.net:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
| <commit_before>import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://localhost:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
<commit_msg>Use official URL for sample client.<commit_after> | import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://firehose.elite-market-data.net:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
| import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://localhost:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
Use official URL for sample client.import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://firehose.elite-market-data.net:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
| <commit_before>import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://localhost:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
<commit_msg>Use official URL for sample client.<commit_after>import zlib
import zmq
import simplejson
import sys
def main():
context = zmq.Context()
subscriber = context.socket(zmq.SUB)
subscriber.connect('tcp://firehose.elite-market-data.net:9500')
subscriber.setsockopt(zmq.SUBSCRIBE, "")
while True:
market_json = zlib.decompress(subscriber.recv())
market_data = simplejson.loads(market_json)
print market_data
sys.stdout.flush()
if __name__ == '__main__':
main()
|
198f6b0e0a98d0f5ef34d1aec44d5e9704d2cae9 | urllib3/__init__.py | urllib3/__init__.py | # urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
| # urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
import logging
logging.getLogger(__name__).setLevel(logging.ERROR)
del logging
| Set default logging level to ERROR, to avoid excessive "No handlers could be found" messages. | Set default logging level to ERROR, to avoid excessive "No handlers could be found" messages.
| Python | mit | Lukasa/urllib3,Geoion/urllib3,msabramo/urllib3,mikelambert/urllib3,haikuginger/urllib3,tutumcloud/urllib3,haikuginger/urllib3,luca3m/urllib3,boyxuper/urllib3,Lukasa/urllib3,sornars/urllib3,Geoion/urllib3,mikelambert/urllib3,tutumcloud/urllib3,gardner/urllib3,msabramo/urllib3,matejcik/urllib3,asmeurer/urllib3,urllib3/urllib3,sigmavirus24/urllib3,matejcik/urllib3,Disassem/urllib3,urllib3/urllib3,silveringsea/urllib3,denim2x/urllib3,gardner/urllib3,luca3m/urllib3,sileht/urllib3,asmeurer/urllib3,silveringsea/urllib3,denim2x/urllib3,sigmavirus24/urllib3,Disassem/urllib3,t-8ch/urllib3,t-8ch/urllib3,sileht/urllib3,boyxuper/urllib3,sornars/urllib3 | # urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
Set default logging level to ERROR, to avoid excessive "No handlers could be found" messages. | # urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
import logging
logging.getLogger(__name__).setLevel(logging.ERROR)
del logging
| <commit_before># urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
<commit_msg>Set default logging level to ERROR, to avoid excessive "No handlers could be found" messages.<commit_after> | # urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
import logging
logging.getLogger(__name__).setLevel(logging.ERROR)
del logging
| # urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
Set default logging level to ERROR, to avoid excessive "No handlers could be found" messages.# urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
import logging
logging.getLogger(__name__).setLevel(logging.ERROR)
del logging
| <commit_before># urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
<commit_msg>Set default logging level to ERROR, to avoid excessive "No handlers could be found" messages.<commit_after># urllib3/__init__.py
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = "$Rev$"
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url,
get_host,
make_headers)
from .exceptions import (
HTTPError,
MaxRetryError,
SSLError,
TimeoutError)
from .poolmanager import PoolManager
from .response import HTTPResponse
from .filepost import encode_multipart_formdata
import logging
logging.getLogger(__name__).setLevel(logging.ERROR)
del logging
|
fd297665f1cb95ba3e8e069a18a9e8af18b449c8 | socketio/sdjango.py | socketio/sdjango.py | import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
| import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
return handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
| Return namespace class after decorating. | Return namespace class after decorating.
| Python | bsd-3-clause | smurfix/gevent-socketio,abourget/gevent-socketio,kazmiruk/gevent-socketio,arnuschky/gevent-socketio,bobvandevijver/gevent-socketio,hzruandd/gevent-socketio,yacneyac/gevent-socketio,Eugeny/gevent-socketio,theskumar-archive/gevent-socketio,arnuschky/gevent-socketio,kazmiruk/gevent-socketio,theskumar-archive/gevent-socketio,bobvandevijver/gevent-socketio,gutomaia/gevent-socketio,smurfix/gevent-socketio,Eugeny/gevent-socketio,gutomaia/gevent-socketio,gutomaia/gevent-socketio,hzruandd/gevent-socketio,yacneyac/gevent-socketio,abourget/gevent-socketio,smurfix/gevent-socketio | import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
Return namespace class after decorating. | import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
return handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
| <commit_before>import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
<commit_msg>Return namespace class after decorating.<commit_after> | import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
return handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
| import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
Return namespace class after decorating.import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
return handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
| <commit_before>import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
<commit_msg>Return namespace class after decorating.<commit_after>import logging
from socketio import socketio_manage
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
SOCKETIO_NS = {}
class namespace(object):
def __init__(self, name=''):
self.name = name
def __call__(self, handler):
SOCKETIO_NS[self.name] = handler
return handler
@csrf_exempt
def socketio(request):
try:
socketio_manage(request.environ, SOCKETIO_NS, request)
except:
logging.getLogger("socketio").error("Exception while handling socketio connection", exc_info=True)
return HttpResponse("")
urls = patterns("", (r'', socketio))
|
28341d67a187a2433bd2a363ce262f401cae7e63 | setup.py | setup.py | #!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
| #!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
py_modules = ["parse_this", "parse_this_test"],
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
| Include the python module in the package | Include the python module in the package
The top level files, parse_this and parse_this_test, were not included.
By using the 'py_modules' option these files are now actually included in the
parse_this pacakge.
| Python | mit | bertrandvidal/parse_this | #!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
Include the python module in the package
The top level files, parse_this and parse_this_test, were not included.
By using the 'py_modules' option these files are now actually included in the
parse_this pacakge. | #!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
py_modules = ["parse_this", "parse_this_test"],
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
| <commit_before>#!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
<commit_msg>Include the python module in the package
The top level files, parse_this and parse_this_test, were not included.
By using the 'py_modules' option these files are now actually included in the
parse_this pacakge.<commit_after> | #!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
py_modules = ["parse_this", "parse_this_test"],
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
| #!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
Include the python module in the package
The top level files, parse_this and parse_this_test, were not included.
By using the 'py_modules' option these files are now actually included in the
parse_this pacakge.#!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
py_modules = ["parse_this", "parse_this_test"],
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
| <commit_before>#!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
<commit_msg>Include the python module in the package
The top level files, parse_this and parse_this_test, were not included.
By using the 'py_modules' option these files are now actually included in the
parse_this pacakge.<commit_after>#!/usr/bin/env python
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), "r") as readme_file:
readme = readme_file.read()
setup(
name = "parse_this",
version = "0.2.1",
description = "Makes it easy to create a command line interface for any function, method or classmethod..",
long_description = readme,
py_modules = ["parse_this", "parse_this_test"],
author = "Bertrand Vidal",
author_email = "vidal.bertrand@gmail.com",
download_url = "https://pypi.python.org/pypi/parse_this",
url = "https://github.com/bertrandvidal/parse_this",
classifiers = [
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Operating System :: OS Independent",
"Programming Language :: Python",
],
setup_requires = [
"nose",
],
)
|
320f64f9c5429b4ffc85d518eb8df02b4c28254f | setup.py | setup.py | """
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
from setuptools import setup
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=[
'nose',
'blinker',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| """
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
import sys
from setuptools import setup
tests_require = [
'nose',
'blinker',
]
if sys.version_info < (2,6):
tests_require.append('simplejson')
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=tests_require,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| Add simplejson as test requirements for python 2.5 | Add simplejson as test requirements for python 2.5
| Python | bsd-3-clause | ecarrara/flask-testing,jmagnusson/flask-testing,jmagnusson/flask-testing,ecarrara/flask-testing | """
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
from setuptools import setup
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=[
'nose',
'blinker',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Add simplejson as test requirements for python 2.5 | """
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
import sys
from setuptools import setup
tests_require = [
'nose',
'blinker',
]
if sys.version_info < (2,6):
tests_require.append('simplejson')
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=tests_require,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| <commit_before>"""
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
from setuptools import setup
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=[
'nose',
'blinker',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Add simplejson as test requirements for python 2.5<commit_after> | """
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
import sys
from setuptools import setup
tests_require = [
'nose',
'blinker',
]
if sys.version_info < (2,6):
tests_require.append('simplejson')
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=tests_require,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| """
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
from setuptools import setup
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=[
'nose',
'blinker',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Add simplejson as test requirements for python 2.5"""
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
import sys
from setuptools import setup
tests_require = [
'nose',
'blinker',
]
if sys.version_info < (2,6):
tests_require.append('simplejson')
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=tests_require,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| <commit_before>"""
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
from setuptools import setup
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=[
'nose',
'blinker',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Add simplejson as test requirements for python 2.5<commit_after>"""
Flask-Testing
--------------
Flask unittest integration.
Links
`````
* `documentation <http://packages.python.org/Flask-Testing>`_
* `development version
<http://bitbucket.org/danjac/flask-testing/get/tip.gz#egg=Flask-Testing-dev>`_
"""
import sys
from setuptools import setup
tests_require = [
'nose',
'blinker',
]
if sys.version_info < (2,6):
tests_require.append('simplejson')
setup(
name='Flask-Testing',
version='0.3',
url='http://bitbucket.org/danjac/flask-testing',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
description='Unit testing for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'twill',
],
tests_require=tests_require,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
599f9d56058418ff9d747b0c4f978c3f95d245d4 | setup.py | setup.py | import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient',
'python-keystoneclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| Add python-keystoneclient to dependencies This is needed for keystone authentication | Add python-keystoneclient to dependencies
This is needed for keystone authentication
| Python | apache-2.0 | honza801/django-swiftbrowser,honza801/django-swiftbrowser,honza801/django-swiftbrowser | import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Add python-keystoneclient to dependencies
This is needed for keystone authentication | import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient',
'python-keystoneclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| <commit_before>import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Add python-keystoneclient to dependencies
This is needed for keystone authentication<commit_after> | import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient',
'python-keystoneclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Add python-keystoneclient to dependencies
This is needed for keystone authenticationimport os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient',
'python-keystoneclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| <commit_before>import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Add python-keystoneclient to dependencies
This is needed for keystone authentication<commit_after>import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-swiftbrowser',
version='0.22',
packages=['swiftbrowser'],
include_package_data=True,
license='Apache License (2.0)',
description='A simple Django app to access Openstack Swift',
long_description=README,
url='https://github.com/cschwede/django-swiftbrowser',
author='Christian Schwede',
author_email='info@cschwede.de',
install_requires=['django>=1.5', 'python-swiftclient',
'python-keystoneclient'],
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
3b6995c5103ba09b21619ac3ae4f4d9733144d89 | craft_message.py | craft_message.py | from datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 140):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
| from datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 280):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
| Change Tweet upper length limit | Change Tweet upper length limit
| Python | unlicense | ElizabethSEden/cycling-weather-bot | from datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 140):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
Change Tweet upper length limit | from datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 280):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
| <commit_before>from datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 140):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
<commit_msg>Change Tweet upper length limit<commit_after> | from datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 280):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
| from datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 140):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
Change Tweet upper length limitfrom datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 280):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
| <commit_before>from datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 140):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
<commit_msg>Change Tweet upper length limit<commit_after>from datetime import date
import calendar
def get_time_index(update):
if "warning" in update:
return 7
if "pollution" in update:
return 8
elif "today" in update:
return 0
elif "this morning" in update:
return 1
elif "this lunchtime" in update:
return 2
elif "this afternoon" in update:
return 3
elif "this evening" in update:
return 4
elif "tonight" in update:
return 5
elif " day" in update:
return 6
else:
raise Exception("time not set: {}".format(update))
def sort_by_time(updates):
timed_updates = {}
for update in updates:
timed_updates[update] = get_time_index(update)
for update in sorted(timed_updates, key=timed_updates.get):
yield update
def fit_into_tweets(updates):
dayname = calendar.day_name[date.today().weekday()]
alerts = []
alert = dayname + ": "
addToAlert = True
i = 1
for update in updates:
if (len(alert) + len(update) < 280):
alert += update + " "
addToAlert = True
else:
alerts.append(alert.rstrip())
i += 1
alert = dayname + ": " + update + " "
if addToAlert or len(alerts) < i:
alerts.append(alert.rstrip())
return alerts
|
0a300314c0fae8420db1aa773e4ec8c96fca1cf5 | setup.py | setup.py | from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=['-Wno-cpp', '-Wno-unused-function', '-std=c99'],
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
| from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
import sys
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
extra_compile_args = ['-Wno-cpp', '-Wno-unused-function', '-std=c99']
if sys.platform == 'win32':
extra_compile_args = []
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=extra_compile_args,
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
| Make it compile on windows | Make it compile on windows | Python | mit | matteorr/coco-analyze,matteorr/coco-analyze,matteorr/coco-analyze | from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=['-Wno-cpp', '-Wno-unused-function', '-std=c99'],
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
Make it compile on windows | from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
import sys
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
extra_compile_args = ['-Wno-cpp', '-Wno-unused-function', '-std=c99']
if sys.platform == 'win32':
extra_compile_args = []
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=extra_compile_args,
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
| <commit_before>from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=['-Wno-cpp', '-Wno-unused-function', '-std=c99'],
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
<commit_msg>Make it compile on windows<commit_after> | from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
import sys
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
extra_compile_args = ['-Wno-cpp', '-Wno-unused-function', '-std=c99']
if sys.platform == 'win32':
extra_compile_args = []
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=extra_compile_args,
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
| from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=['-Wno-cpp', '-Wno-unused-function', '-std=c99'],
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
Make it compile on windowsfrom distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
import sys
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
extra_compile_args = ['-Wno-cpp', '-Wno-unused-function', '-std=c99']
if sys.platform == 'win32':
extra_compile_args = []
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=extra_compile_args,
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
| <commit_before>from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=['-Wno-cpp', '-Wno-unused-function', '-std=c99'],
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
<commit_msg>Make it compile on windows<commit_after>from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
import numpy as np
import sys
# To compile and install locally run "python setup.py build_ext --inplace"
# To install library to Python site-packages run "python setup.py build_ext install"
extra_compile_args = ['-Wno-cpp', '-Wno-unused-function', '-std=c99']
if sys.platform == 'win32':
extra_compile_args = []
ext_modules = [
Extension(
'pycocotools._mask',
sources=['./pycocotools/headers/maskApi.c', 'pycocotools/_mask.pyx'],
include_dirs = [np.get_include(), './pycocotools/headers'],
extra_compile_args=extra_compile_args,
)
]
setup(name='pycocotools',
packages=['pycocotools'],
package_dir = {'pycocotools': 'pycocotools'},
version='2.0',
ext_modules=
cythonize(ext_modules)
)
|
1d9d661dd03c6f66f62b3f32865eadd4b5afe304 | setup.py | setup.py | from setuptools import setup, find_packages
# Distribution metadata for the selectivesearch package.
setup(
    name="selectivesearch",
    version="0.1",
    url="https://github.com/AlpacaDB/selectivesearch",
    description="Selective Search implementation for Python",
    author="AlpacaDB, Inc.",
    license='MIT',
    # Trove classifiers; see https://pypi.org/classifiers/
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Information Technology',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
    ],
    keywords='rcnn',
    # Discover every package under the project root automatically.
    packages=find_packages(),
    install_requires=['numpy', 'scikit-image'],
)
| from setuptools import setup, find_packages
setup(
name="selectivesearch",
version="0.2",
url="https://github.com/AlpacaDB/selectivesearch",
description="Selective Search implementation for Python",
author="AlpacaDB, Inc.",
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='rcnn',
packages=find_packages(),
install_requires=['numpy', 'scikit-image'],
)
| Bump the version to 0.2 | Bump the version to 0.2
| Python | mit | AlpacaDB/selectivesearch | from setuptools import setup, find_packages
setup(
name="selectivesearch",
version="0.1",
url="https://github.com/AlpacaDB/selectivesearch",
description="Selective Search implementation for Python",
author="AlpacaDB, Inc.",
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='rcnn',
packages=find_packages(),
install_requires=['numpy', 'scikit-image'],
)
Bump the version to 0.2 | from setuptools import setup, find_packages
setup(
name="selectivesearch",
version="0.2",
url="https://github.com/AlpacaDB/selectivesearch",
description="Selective Search implementation for Python",
author="AlpacaDB, Inc.",
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='rcnn',
packages=find_packages(),
install_requires=['numpy', 'scikit-image'],
)
| <commit_before>from setuptools import setup, find_packages
setup(
name="selectivesearch",
version="0.1",
url="https://github.com/AlpacaDB/selectivesearch",
description="Selective Search implementation for Python",
author="AlpacaDB, Inc.",
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='rcnn',
packages=find_packages(),
install_requires=['numpy', 'scikit-image'],
)
<commit_msg>Bump the version to 0.2<commit_after> | from setuptools import setup, find_packages
setup(
name="selectivesearch",
version="0.2",
url="https://github.com/AlpacaDB/selectivesearch",
description="Selective Search implementation for Python",
author="AlpacaDB, Inc.",
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='rcnn',
packages=find_packages(),
install_requires=['numpy', 'scikit-image'],
)
| from setuptools import setup, find_packages
setup(
name="selectivesearch",
version="0.1",
url="https://github.com/AlpacaDB/selectivesearch",
description="Selective Search implementation for Python",
author="AlpacaDB, Inc.",
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='rcnn',
packages=find_packages(),
install_requires=['numpy', 'scikit-image'],
)
Bump the version to 0.2from setuptools import setup, find_packages
setup(
name="selectivesearch",
version="0.2",
url="https://github.com/AlpacaDB/selectivesearch",
description="Selective Search implementation for Python",
author="AlpacaDB, Inc.",
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='rcnn',
packages=find_packages(),
install_requires=['numpy', 'scikit-image'],
)
| <commit_before>from setuptools import setup, find_packages
setup(
name="selectivesearch",
version="0.1",
url="https://github.com/AlpacaDB/selectivesearch",
description="Selective Search implementation for Python",
author="AlpacaDB, Inc.",
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='rcnn',
packages=find_packages(),
install_requires=['numpy', 'scikit-image'],
)
<commit_msg>Bump the version to 0.2<commit_after>from setuptools import setup, find_packages
setup(
name="selectivesearch",
version="0.2",
url="https://github.com/AlpacaDB/selectivesearch",
description="Selective Search implementation for Python",
author="AlpacaDB, Inc.",
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='rcnn',
packages=find_packages(),
install_requires=['numpy', 'scikit-image'],
)
|
ad64d506a721b27150acdac300570235b4fc1f01 | setup.py | setup.py | import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
    """``python setup.py test`` command that delegates to pytest.

    Runs pytest in doctest mode over all modules, skipping setup.py itself.
    """

    def finalize_options(self):
        TestCommand.finalize_options(self)
        # pytest.main() expects a *list* of command-line argument strings;
        # passing one whitespace-joined string is unsupported in modern
        # pytest (string arguments were deprecated and later removed).
        self.test_args = ['--doctest-modules', '--ignore', 'setup.py']
        self.test_suite = True

    def run_tests(self):
        # Import here, because outside the eggs aren't loaded yet.
        import pytest
        errno = pytest.main(self.test_args)
        sys.exit(errno)
# Read the long description up front so the file handle is closed
# deterministically; a bare open('README.rst').read() leaks the handle
# until garbage collection and raises ResourceWarning under -W error.
with open('README.rst') as readme:
    long_description = readme.read()

setup(
    name="SchunkMotionProtocol",
    # Single-source the version from the schunk module itself.
    version=schunk.__version__,
    py_modules=['schunk'],
    author="Matthias Geier",
    author_email="Matthias.Geier@gmail.com",
    description="Schunk Motion Protocol for Python",
    long_description=long_description,
    license="MIT",
    keywords="Schunk serial servo motor".split(),
    url="http://schunk.rtfd.org/",
    download_url="https://github.com/spatialaudio/schunk/releases/",
    platforms='any',
    # Trove classifiers; see https://pypi.org/classifiers/
    classifiers=[
        "Development Status :: 3 - Alpha",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
        "Topic :: Scientific/Engineering",
    ],
    tests_require=['pytest'],
    # `python setup.py test` is routed through the PyTest command above.
    cmdclass={'test': PyTest},
)
| import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = '--doctest-modules --ignore setup.py'
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name="SchunkMotionProtocol",
version=schunk.__version__,
py_modules=['schunk'],
author="Matthias Geier",
author_email="Matthias.Geier@gmail.com",
description="Schunk Motion Protocol for Python",
long_description=open('README.rst').read(),
license="MIT",
keywords="Schunk serial servo motor".split(),
url="http://schunk.rtfd.org/",
download_url="https://github.com/spatialaudio/schunk/releases/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest', 'pyserial'],
cmdclass={'test': PyTest},
)
| Add PySerial to the test requirements | Add PySerial to the test requirements
It is needed for the doctests.
| Python | mit | spatialaudio/schunk | import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = '--doctest-modules --ignore setup.py'
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name="SchunkMotionProtocol",
version=schunk.__version__,
py_modules=['schunk'],
author="Matthias Geier",
author_email="Matthias.Geier@gmail.com",
description="Schunk Motion Protocol for Python",
long_description=open('README.rst').read(),
license="MIT",
keywords="Schunk serial servo motor".split(),
url="http://schunk.rtfd.org/",
download_url="https://github.com/spatialaudio/schunk/releases/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest'],
cmdclass={'test': PyTest},
)
Add PySerial to the test requirements
It is needed for the doctests. | import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = '--doctest-modules --ignore setup.py'
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name="SchunkMotionProtocol",
version=schunk.__version__,
py_modules=['schunk'],
author="Matthias Geier",
author_email="Matthias.Geier@gmail.com",
description="Schunk Motion Protocol for Python",
long_description=open('README.rst').read(),
license="MIT",
keywords="Schunk serial servo motor".split(),
url="http://schunk.rtfd.org/",
download_url="https://github.com/spatialaudio/schunk/releases/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest', 'pyserial'],
cmdclass={'test': PyTest},
)
| <commit_before>import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = '--doctest-modules --ignore setup.py'
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name="SchunkMotionProtocol",
version=schunk.__version__,
py_modules=['schunk'],
author="Matthias Geier",
author_email="Matthias.Geier@gmail.com",
description="Schunk Motion Protocol for Python",
long_description=open('README.rst').read(),
license="MIT",
keywords="Schunk serial servo motor".split(),
url="http://schunk.rtfd.org/",
download_url="https://github.com/spatialaudio/schunk/releases/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest'],
cmdclass={'test': PyTest},
)
<commit_msg>Add PySerial to the test requirements
It is needed for the doctests.<commit_after> | import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = '--doctest-modules --ignore setup.py'
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name="SchunkMotionProtocol",
version=schunk.__version__,
py_modules=['schunk'],
author="Matthias Geier",
author_email="Matthias.Geier@gmail.com",
description="Schunk Motion Protocol for Python",
long_description=open('README.rst').read(),
license="MIT",
keywords="Schunk serial servo motor".split(),
url="http://schunk.rtfd.org/",
download_url="https://github.com/spatialaudio/schunk/releases/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest', 'pyserial'],
cmdclass={'test': PyTest},
)
| import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = '--doctest-modules --ignore setup.py'
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name="SchunkMotionProtocol",
version=schunk.__version__,
py_modules=['schunk'],
author="Matthias Geier",
author_email="Matthias.Geier@gmail.com",
description="Schunk Motion Protocol for Python",
long_description=open('README.rst').read(),
license="MIT",
keywords="Schunk serial servo motor".split(),
url="http://schunk.rtfd.org/",
download_url="https://github.com/spatialaudio/schunk/releases/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest'],
cmdclass={'test': PyTest},
)
Add PySerial to the test requirements
It is needed for the doctests.import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = '--doctest-modules --ignore setup.py'
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name="SchunkMotionProtocol",
version=schunk.__version__,
py_modules=['schunk'],
author="Matthias Geier",
author_email="Matthias.Geier@gmail.com",
description="Schunk Motion Protocol for Python",
long_description=open('README.rst').read(),
license="MIT",
keywords="Schunk serial servo motor".split(),
url="http://schunk.rtfd.org/",
download_url="https://github.com/spatialaudio/schunk/releases/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest', 'pyserial'],
cmdclass={'test': PyTest},
)
| <commit_before>import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = '--doctest-modules --ignore setup.py'
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name="SchunkMotionProtocol",
version=schunk.__version__,
py_modules=['schunk'],
author="Matthias Geier",
author_email="Matthias.Geier@gmail.com",
description="Schunk Motion Protocol for Python",
long_description=open('README.rst').read(),
license="MIT",
keywords="Schunk serial servo motor".split(),
url="http://schunk.rtfd.org/",
download_url="https://github.com/spatialaudio/schunk/releases/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest'],
cmdclass={'test': PyTest},
)
<commit_msg>Add PySerial to the test requirements
It is needed for the doctests.<commit_after>import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import schunk
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = '--doctest-modules --ignore setup.py'
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name="SchunkMotionProtocol",
version=schunk.__version__,
py_modules=['schunk'],
author="Matthias Geier",
author_email="Matthias.Geier@gmail.com",
description="Schunk Motion Protocol for Python",
long_description=open('README.rst').read(),
license="MIT",
keywords="Schunk serial servo motor".split(),
url="http://schunk.rtfd.org/",
download_url="https://github.com/spatialaudio/schunk/releases/",
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
tests_require=['pytest', 'pyserial'],
cmdclass={'test': PyTest},
)
|
e060500e4979f918b507a6f7d12f8d25e2be318b | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-

from setuptools import setup
from wonderful_bing import wonderful_bing

# Prefer a reST long description converted from the Markdown README
# (PyPI renders reST); fall back to the raw Markdown text when pypandoc
# is missing or the README cannot be converted.
try:
    import pypandoc
    long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
    with open('README.md') as f:
        long_description = f.read()

setup(
    name='wonderful_bing',
    # Single-source the version from the package itself.
    version=wonderful_bing.__version__,
    description="A script download Bing's img and set as wallpaper",
    long_description=long_description,
    url='https://github.com/lord63/wonderful_bing',
    author='lord63',
    author_email='lord63.j@gmail.com',
    license='MIT',
    # Trove classifiers; see https://pypi.org/classifiers/
    classifiers=[
        'Development Status :: 4 - Beta',
        'Operating System :: POSIX',
        'Operating System :: POSIX :: Linux',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
    ],
    keywords='bing wallpaper',
    packages=['wonderful_bing'],
    install_requires=['requests'],
    include_package_data=True,
    # Installs the `wonderful_bing` console command pointing at main().
    entry_points={
        'console_scripts': [
            'wonderful_bing=wonderful_bing.wonderful_bing:main']
    }
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from wonderful_bing import wonderful_bing
try:
import pypandoc
long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
with open('README.md') as f:
long_description = f.read()
setup(
name='wonderful_bing',
version=wonderful_bing.__version__,
description="A script download Bing's img and set as wallpaper",
long_description=long_description,
url='https://github.com/lord63/wonderful_bing',
author='lord63',
author_email='lord63.j@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='bing wallpaper',
packages=['wonderful_bing'],
install_requires=['requests'],
include_package_data=True,
entry_points={
'console_scripts': [
'bing=wonderful_bing.wonderful_bing:main']
}
)
| Change the command: wonderful_bing -> bing. | Change the command: wonderful_bing -> bing.
Close #15
| Python | mit | lord63/wonderful_bing | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from wonderful_bing import wonderful_bing
try:
import pypandoc
long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
with open('README.md') as f:
long_description = f.read()
setup(
name='wonderful_bing',
version=wonderful_bing.__version__,
description="A script download Bing's img and set as wallpaper",
long_description=long_description,
url='https://github.com/lord63/wonderful_bing',
author='lord63',
author_email='lord63.j@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='bing wallpaper',
packages=['wonderful_bing'],
install_requires=['requests'],
include_package_data=True,
entry_points={
'console_scripts': [
'wonderful_bing=wonderful_bing.wonderful_bing:main']
}
)
Change the command: wonderful_bing -> bing.
Close #15 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from wonderful_bing import wonderful_bing
try:
import pypandoc
long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
with open('README.md') as f:
long_description = f.read()
setup(
name='wonderful_bing',
version=wonderful_bing.__version__,
description="A script download Bing's img and set as wallpaper",
long_description=long_description,
url='https://github.com/lord63/wonderful_bing',
author='lord63',
author_email='lord63.j@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='bing wallpaper',
packages=['wonderful_bing'],
install_requires=['requests'],
include_package_data=True,
entry_points={
'console_scripts': [
'bing=wonderful_bing.wonderful_bing:main']
}
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from wonderful_bing import wonderful_bing
try:
import pypandoc
long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
with open('README.md') as f:
long_description = f.read()
setup(
name='wonderful_bing',
version=wonderful_bing.__version__,
description="A script download Bing's img and set as wallpaper",
long_description=long_description,
url='https://github.com/lord63/wonderful_bing',
author='lord63',
author_email='lord63.j@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='bing wallpaper',
packages=['wonderful_bing'],
install_requires=['requests'],
include_package_data=True,
entry_points={
'console_scripts': [
'wonderful_bing=wonderful_bing.wonderful_bing:main']
}
)
<commit_msg>Change the command: wonderful_bing -> bing.
Close #15<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from wonderful_bing import wonderful_bing
try:
import pypandoc
long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
with open('README.md') as f:
long_description = f.read()
setup(
name='wonderful_bing',
version=wonderful_bing.__version__,
description="A script download Bing's img and set as wallpaper",
long_description=long_description,
url='https://github.com/lord63/wonderful_bing',
author='lord63',
author_email='lord63.j@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='bing wallpaper',
packages=['wonderful_bing'],
install_requires=['requests'],
include_package_data=True,
entry_points={
'console_scripts': [
'bing=wonderful_bing.wonderful_bing:main']
}
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from wonderful_bing import wonderful_bing
try:
import pypandoc
long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
with open('README.md') as f:
long_description = f.read()
setup(
name='wonderful_bing',
version=wonderful_bing.__version__,
description="A script download Bing's img and set as wallpaper",
long_description=long_description,
url='https://github.com/lord63/wonderful_bing',
author='lord63',
author_email='lord63.j@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='bing wallpaper',
packages=['wonderful_bing'],
install_requires=['requests'],
include_package_data=True,
entry_points={
'console_scripts': [
'wonderful_bing=wonderful_bing.wonderful_bing:main']
}
)
Change the command: wonderful_bing -> bing.
Close #15#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from wonderful_bing import wonderful_bing
try:
import pypandoc
long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
with open('README.md') as f:
long_description = f.read()
setup(
name='wonderful_bing',
version=wonderful_bing.__version__,
description="A script download Bing's img and set as wallpaper",
long_description=long_description,
url='https://github.com/lord63/wonderful_bing',
author='lord63',
author_email='lord63.j@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='bing wallpaper',
packages=['wonderful_bing'],
install_requires=['requests'],
include_package_data=True,
entry_points={
'console_scripts': [
'bing=wonderful_bing.wonderful_bing:main']
}
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from wonderful_bing import wonderful_bing
try:
import pypandoc
long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
with open('README.md') as f:
long_description = f.read()
setup(
name='wonderful_bing',
version=wonderful_bing.__version__,
description="A script download Bing's img and set as wallpaper",
long_description=long_description,
url='https://github.com/lord63/wonderful_bing',
author='lord63',
author_email='lord63.j@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='bing wallpaper',
packages=['wonderful_bing'],
install_requires=['requests'],
include_package_data=True,
entry_points={
'console_scripts': [
'wonderful_bing=wonderful_bing.wonderful_bing:main']
}
)
<commit_msg>Change the command: wonderful_bing -> bing.
Close #15<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from wonderful_bing import wonderful_bing
try:
import pypandoc
long_description = pypandoc.convert('README.md','rst')
except (IOError, ImportError):
with open('README.md') as f:
long_description = f.read()
setup(
name='wonderful_bing',
version=wonderful_bing.__version__,
description="A script download Bing's img and set as wallpaper",
long_description=long_description,
url='https://github.com/lord63/wonderful_bing',
author='lord63',
author_email='lord63.j@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
keywords='bing wallpaper',
packages=['wonderful_bing'],
install_requires=['requests'],
include_package_data=True,
entry_points={
'console_scripts': [
'bing=wonderful_bing.wonderful_bing:main']
}
)
|
4c0bbbe3072f3ae373cd1357daec7d064ec22dd2 | setup.py | setup.py | from setuptools import setup
setup(name='tfr',
version='0.1',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
| from setuptools import setup
setup(name='tfr',
version='0.2',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
| Increment the version to 0.2. | Increment the version to 0.2.
| Python | mit | bzamecnik/tfr,bzamecnik/tfr | from setuptools import setup
setup(name='tfr',
version='0.1',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
Increment the version to 0.2. | from setuptools import setup
setup(name='tfr',
version='0.2',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
| <commit_before>from setuptools import setup
setup(name='tfr',
version='0.1',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
<commit_msg>Increment the version to 0.2.<commit_after> | from setuptools import setup
setup(name='tfr',
version='0.2',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
| from setuptools import setup
setup(name='tfr',
version='0.1',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
Increment the version to 0.2.from setuptools import setup
setup(name='tfr',
version='0.2',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
| <commit_before>from setuptools import setup
setup(name='tfr',
version='0.1',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
<commit_msg>Increment the version to 0.2.<commit_after>from setuptools import setup
setup(name='tfr',
version='0.2',
description='Time-frequency reassigned spectrograms',
url='http://github.com/bzamecnik/tfr',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
packages=['tfr'],
zip_safe=False,
install_requires=[
'numpy',
'scikit-learn',
'scipy',
'soundfile',
],
setup_requires=['setuptools-markdown'],
long_description_markdown_filename='README.md',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Multimedia :: Sound/Audio :: Analysis',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
|
9629f665d11131e045d366a828a356f3e8916492 | setup.py | setup.py | from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read()
)
| from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read(),
long_description_content_type='text/markdown'
)
| Fix PyPI README.MD showing problem. | Fix PyPI README.MD showing problem.
There is a problem in the project's PyPI page. To fix this I added the following line in the setup.py file:
```python
long_description_content_type='text/markdown'
``` | Python | unlicense | rdegges/django-heroku-postgresify | from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read()
)
Fix PyPI README.MD showing problem.
There is a problem in the project's PyPI page. To fix this I added the following line in the setup.py file:
```python
long_description_content_type='text/markdown'
``` | from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read(),
long_description_content_type='text/markdown'
)
| <commit_before>from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read()
)
<commit_msg>Fix PyPI README.MD showing problem.
There is a problem in the project's PyPI page. To fix this I added the following line in the setup.py file:
```python
long_description_content_type='text/markdown'
```<commit_after> | from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read(),
long_description_content_type='text/markdown'
)
| from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read()
)
Fix PyPI README.MD showing problem.
There is a problem in the project's PyPI page. To fix this I added the following line in the setup.py file:
```python
long_description_content_type='text/markdown'
```from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read(),
long_description_content_type='text/markdown'
)
| <commit_before>from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read()
)
<commit_msg>Fix PyPI README.MD showing problem.
There is a problem in the project's PyPI page. To fix this I added the following line in the setup.py file:
```python
long_description_content_type='text/markdown'
```<commit_after>from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-postgresify',
version = '0.4',
py_modules = ('postgresify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['Django>=1.2', 'dj-database-url>=0.3.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = 'rdegges@gmail.com',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-postgresify',
keywords = 'django heroku cloud postgresql postgres db database awesome epic',
description = 'Automatic Django database configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read(),
long_description_content_type='text/markdown'
)
|
385b280ccad9385d24d2ad3f892718c8302f8718 | setup.py | setup.py | #!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.4',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
| #!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.5.dev1',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
| Bump to next dev 0.1.5.dev1 | Bump to next dev 0.1.5.dev1
| Python | mit | postlund/pyatv,postlund/pyatv | #!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.4',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
Bump to next dev 0.1.5.dev1 | #!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.5.dev1',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
| <commit_before>#!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.4',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
<commit_msg>Bump to next dev 0.1.5.dev1<commit_after> | #!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.5.dev1',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
| #!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.4',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
Bump to next dev 0.1.5.dev1#!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.5.dev1',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
| <commit_before>#!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.4',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
<commit_msg>Bump to next dev 0.1.5.dev1<commit_after>#!/usr/bin/env python3
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='pyatv',
version='0.1.5.dev1',
license='MIT',
url='https://github.com/postlund/pyatv',
author='Pierre Ståhl',
author_email='pierre.staahl@gmail.com',
description='Library for controlling an Apple TV',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'aiohttp==1.3.1',
'zeroconf==0.18.0',
],
test_suite='tests',
keywords=['apple', 'tv'],
tests_require=['tox'],
entry_points={
'console_scripts': [
'atvremote = pyatv.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries',
'Topic :: Home Automation',
],
)
|
31cf6e14f42b2703e410426e7f84ffc49457cbbb | setup.py | setup.py | #!/usr/bin/env python3
from setuptools import setup
setup(
name="Pelops",
version="0.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=["pelops"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
| #!/usr/bin/env python3
from setuptools import find_package
from setuptools import setup
setup(
name="Pelops",
version="0.1.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=find_package(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
| Add automatic search for installable packages | Add automatic search for installable packages
| Python | apache-2.0 | dave-lab41/pelops,dave-lab41/pelops,Lab41/pelops,Lab41/pelops | #!/usr/bin/env python3
from setuptools import setup
setup(
name="Pelops",
version="0.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=["pelops"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
Add automatic search for installable packages | #!/usr/bin/env python3
from setuptools import find_package
from setuptools import setup
setup(
name="Pelops",
version="0.1.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=find_package(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
| <commit_before>#!/usr/bin/env python3
from setuptools import setup
setup(
name="Pelops",
version="0.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=["pelops"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
<commit_msg>Add automatic search for installable packages<commit_after> | #!/usr/bin/env python3
from setuptools import find_package
from setuptools import setup
setup(
name="Pelops",
version="0.1.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=find_package(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
| #!/usr/bin/env python3
from setuptools import setup
setup(
name="Pelops",
version="0.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=["pelops"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
Add automatic search for installable packages#!/usr/bin/env python3
from setuptools import find_package
from setuptools import setup
setup(
name="Pelops",
version="0.1.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=find_package(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
| <commit_before>#!/usr/bin/env python3
from setuptools import setup
setup(
name="Pelops",
version="0.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=["pelops"],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
<commit_msg>Add automatic search for installable packages<commit_after>#!/usr/bin/env python3
from setuptools import find_package
from setuptools import setup
setup(
name="Pelops",
version="0.1.1",
description="Car re-identification via deep learning",
url="https://www.python.org/sigs/distutils-sig/",
author="Lab41",
author_email="lab41@iqt.org",
license="Apache Software License",
packages=find_package(),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
keywords=[
"computer vision",
"deep learning",
"resnet",
"vehicle re-identification",
],
)
|
d81f621552b3cc1bb081faf491728ba59e0d1075 | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
'humanize'
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
| Add pip requirement for humanize. | Add pip requirement for humanize.
| Python | mit | mayfield/ecmcli | #!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
Add pip requirement for humanize. | #!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
'humanize'
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
| <commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
<commit_msg>Add pip requirement for humanize.<commit_after> | #!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
'humanize'
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
Add pip requirement for humanize.#!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
'humanize'
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
| <commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
<commit_msg>Add pip requirement for humanize.<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
README = 'README.md'
def long_desc():
try:
import pypandoc
except ImportError:
with open(README) as f:
return f.read()
else:
return pypandoc.convert(README, 'rst')
setup(
name='ecmcli',
version='0.0.2',
description='Command Line Client for Cradlepoint ECM',
author='Justin Mayfield',
author_email='tooker@gmail.com',
url='https://github.com/mayfield/ecmcli/',
license='MIT',
long_description=long_desc(),
packages=find_packages(),
install_requires=[
'syndicate',
'humanize'
],
entry_points = {
'console_scripts': ['ecm=ecmcli.main:main'],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
9051a84130aabe97f9f83176be443d79825f3dda | setup.py | setup.py | # -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://warthog.readthedocs.org/'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
| # -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://github.com/smarter-travel-media/warthog'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
| Make github project page the project URL | Make github project page the project URL
| Python | mit | smarter-travel-media/warthog | # -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://warthog.readthedocs.org/'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
Make github project page the project URL | # -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://github.com/smarter-travel-media/warthog'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
| <commit_before># -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://warthog.readthedocs.org/'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
<commit_msg>Make github project page the project URL<commit_after> | # -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://github.com/smarter-travel-media/warthog'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
| # -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://warthog.readthedocs.org/'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
Make github project page the project URL# -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://github.com/smarter-travel-media/warthog'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
| <commit_before># -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://warthog.readthedocs.org/'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
<commit_msg>Make github project page the project URL<commit_after># -*- coding: utf-8 -*-
#
# Warthog - Simple client for A10 load balancers
#
# Copyright 2014 Smarter Travel
#
# Available under the MIT license. See LICENSE for details.
#
import codecs
from setuptools import find_packages, setup
import warthog
AUTHOR = 'Smarter Travel'
EMAIL = ''
DESCRIPTION = 'Simple client for A10 load balancers'
URL = 'https://github.com/smarter-travel-media/warthog'
LICENSE = 'MIT'
CLASSIFIERS = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Installation/Setup",
"Topic :: System :: Software Distribution"
]
REQUIREMENTS = [
'requests'
]
with codecs.open('README.rst', 'r', 'utf-8') as handle:
LONG_DESCRIPTION = handle.read()
setup(
name='warthog',
version=warthog.__version__,
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author_email=EMAIL,
classifiers=CLASSIFIERS,
license=LICENSE,
url=URL,
install_requires=REQUIREMENTS,
zip_safe=True,
packages=find_packages())
|
089c1a2b8645d7e3db13f2c4a73cfcd3925dc4c2 | setup.py | setup.py | from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
| from setuptools import setup, find_packages
import sys, os
version = '0.0'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
| Set the version to 0.0 to let people know that this is ever changing | Set the version to 0.0 to let people know that this is ever changing
| Python | bsd-2-clause | ericmoritz/arbeiter | from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
Set the version to 0.0 to let people know that this is ever changing | from setuptools import setup, find_packages
import sys, os
version = '0.0'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
| <commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Set the version to 0.0 to let people know that this is ever changing<commit_after> | from setuptools import setup, find_packages
import sys, os
version = '0.0'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
| from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
Set the version to 0.0 to let people know that this is ever changingfrom setuptools import setup, find_packages
import sys, os
version = '0.0'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
| <commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Set the version to 0.0 to let people know that this is ever changing<commit_after>from setuptools import setup, find_packages
import sys, os
version = '0.0'
setup(name='arbeiter',
version=version,
description="An unassuming work queue system",
long_description="""\
A work queue system built using Kestrel""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Eric Moritz',
author_email='eric@themoritzfamily.com',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
"pykestrel",
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
99fae00aca7bb28b0915664698937e2464e92eb4 | setup.py | setup.py | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto',
'requests>=2.5.1',
'six',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker',
'mock',
],
)
| import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto >= 2.6.1', 'pycrypto < 3.0.0',
'requests >= 2.5.1', 'requests < 3.0.0',
'six >= 1.10.0', 'six < 2.0.0',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker==1.1.1',
'mock==2.0.0',
],
)
| Update package restrictions for clarity and testing | Update package restrictions for clarity and testing
| Python | mit | LaunchKey/launchkey-python,iovation/launchkey-python | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto',
'requests>=2.5.1',
'six',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker',
'mock',
],
)
Update package restrictions for clarity and testing | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto >= 2.6.1', 'pycrypto < 3.0.0',
'requests >= 2.5.1', 'requests < 3.0.0',
'six >= 1.10.0', 'six < 2.0.0',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker==1.1.1',
'mock==2.0.0',
],
)
| <commit_before>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto',
'requests>=2.5.1',
'six',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker',
'mock',
],
)
<commit_msg>Update package restrictions for clarity and testing<commit_after> | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto >= 2.6.1', 'pycrypto < 3.0.0',
'requests >= 2.5.1', 'requests < 3.0.0',
'six >= 1.10.0', 'six < 2.0.0',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker==1.1.1',
'mock==2.0.0',
],
)
| import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto',
'requests>=2.5.1',
'six',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker',
'mock',
],
)
Update package restrictions for clarity and testingimport os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto >= 2.6.1', 'pycrypto < 3.0.0',
'requests >= 2.5.1', 'requests < 3.0.0',
'six >= 1.10.0', 'six < 2.0.0',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker==1.1.1',
'mock==2.0.0',
],
)
| <commit_before>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto',
'requests>=2.5.1',
'six',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker',
'mock',
],
)
<commit_msg>Update package restrictions for clarity and testing<commit_after>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()
CHANGES = open(os.path.join(here, 'CHANGES.md')).read()
requires = [
'pycrypto >= 2.6.1', 'pycrypto < 3.0.0',
'requests >= 2.5.1', 'requests < 3.0.0',
'six >= 1.10.0', 'six < 2.0.0',
]
setup(name='launchkey-python',
version='1.3.0',
description='LaunchKey Python SDK',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='LaunchKey',
author_email='support@launchkey.com',
url='https://launchkey.com',
keywords='launchkey security authentication',
license='MIT',
py_modules=[
'launchkey',
],
zip_safe=False,
test_suite='tests',
install_requires=requires,
tests_require=[
'Mocker==1.1.1',
'mock==2.0.0',
],
)
|
a3053c843a5709d3fd0fe1dc6c93f369dc101d8b | setup.py | setup.py | from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckan-service-provider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
| from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckanserviceprovider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
| Rename the package so that it does not contain - | Rename the package so that it does not contain - | Python | agpl-3.0 | ESRC-CDRC/ckan-service-provider,datawagovau/ckan-service-provider,deniszgonjanin/ckan-service-provider,ckan/ckan-service-provider | from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckan-service-provider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
Rename the package so that it does not contain - | from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckanserviceprovider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
| <commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckan-service-provider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Rename the package so that it does not contain -<commit_after> | from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckanserviceprovider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
| from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckan-service-provider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
Rename the package so that it does not contain -from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckanserviceprovider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
| <commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckan-service-provider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Rename the package so that it does not contain -<commit_after>from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='ckanserviceprovider',
version=version,
description="A server that can server jobs at services.",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='David Raznick',
author_email='kindly@gmail.com',
url='',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=['''
APScheduler
Flask
SQLAlchemy
requests'''
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
203fd6173cdc27d32c57db7c0a0bdd7335cfa885 | setup.py | setup.py | from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
| from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
| Update trove to remove Python 3.2, add Python 3.5 | Update trove to remove Python 3.2, add Python 3.5
| Python | mit | ozialien/python-skyfield,skyfielders/python-skyfield,ozialien/python-skyfield,skyfielders/python-skyfield | from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
Update trove to remove Python 3.2, add Python 3.5 | from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
| <commit_before>from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
<commit_msg>Update trove to remove Python 3.2, add Python 3.5<commit_after> | from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
| from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
Update trove to remove Python 3.2, add Python 3.5from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
| <commit_before>from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
<commit_msg>Update trove to remove Python 3.2, add Python 3.5<commit_after>from distutils.core import setup
import skyfield # safe, because __init__.py contains no import statements
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__.split('\n', 1)[0],
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[
'skyfield',
'skyfield.data',
'skyfield.tests',
],
package_data = {
'skyfield': ['documentation/*.rst'],
'skyfield.data': ['*.npy', '*.txt'],
},
install_requires=[
'jplephem>=2.3',
'numpy',
'sgp4>=1.4',
])
|
32eccc0c3d8ec0ba19e507d538c1f5cc5f6baaf5 | setup.py | setup.py | import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post5",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
| import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post6",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
| Upgrade version to 0.0.12.post6 (choice_dict) | Upgrade version to 0.0.12.post6 (choice_dict)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com>
| Python | mit | lebrice/SimpleParsing | import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post5",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
Upgrade version to 0.0.12.post6 (choice_dict)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com> | import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post6",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
| <commit_before>import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post5",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
<commit_msg>Upgrade version to 0.0.12.post6 (choice_dict)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com><commit_after> | import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post6",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
| import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post5",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
Upgrade version to 0.0.12.post6 (choice_dict)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com>import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post6",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
| <commit_before>import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post5",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
<commit_msg>Upgrade version to 0.0.12.post6 (choice_dict)
Signed-off-by: Fabrice Normandin <ee438dab901b32439200d6bb23a0e635234ed3f0@gmail.com><commit_after>import sys
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
packages = setuptools.find_namespace_packages(include=['simple_parsing*'])
print("PACKAGES FOUND:", packages)
print(sys.version_info)
setuptools.setup(
name="simple_parsing",
version="0.0.12.post6",
author="Fabrice Normandin",
author_email="fabrice.normandin@gmail.com",
description="A small utility for simplifying and cleaning up argument parsing scripts.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/lebrice/SimpleParsing",
packages=packages,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
install_requires=[
"typing_inspect",
"dataclasses;python_version<'3.7'",
]
)
|
f8359081afadf6a653b5df3d28d4a6676c9c6860 | setup.py | setup.py | from distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['jplephem', 'numpy', 'sgp4'],
)
| from distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['de421', 'jplephem', 'numpy', 'sgp4'],
)
| Add de421 as an install dependency | Add de421 as an install dependency
| Python | mit | exoanalytic/python-skyfield,skyfielders/python-skyfield,ozialien/python-skyfield,GuidoBR/python-skyfield,GuidoBR/python-skyfield,skyfielders/python-skyfield,ozialien/python-skyfield,exoanalytic/python-skyfield | from distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['jplephem', 'numpy', 'sgp4'],
)
Add de421 as an install dependency | from distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['de421', 'jplephem', 'numpy', 'sgp4'],
)
| <commit_before>from distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['jplephem', 'numpy', 'sgp4'],
)
<commit_msg>Add de421 as an install dependency<commit_after> | from distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['de421', 'jplephem', 'numpy', 'sgp4'],
)
| from distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['jplephem', 'numpy', 'sgp4'],
)
Add de421 as an install dependencyfrom distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['de421', 'jplephem', 'numpy', 'sgp4'],
)
| <commit_before>from distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['jplephem', 'numpy', 'sgp4'],
)
<commit_msg>Add de421 as an install dependency<commit_after>from distutils.core import setup
import skyfield # to learn the version
setup(
name='skyfield',
version=skyfield.__version__,
description=skyfield.__doc__,
long_description=open('README.rst').read(),
license='MIT',
author='Brandon Rhodes',
author_email='brandon@rhodesmill.org',
url='http://github.com/brandon-rhodes/python-skyfield/',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Astronomy',
],
packages=[ 'skyfield', 'skyfield.tests' ],
install_requires=['de421', 'jplephem', 'numpy', 'sgp4'],
)
|
4542499e53a228665caa519585ef8afe584ef388 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
| from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
],
entry_points={
'console_scripts':
['run_monitors=cosmo.run_monitors:runner']
},
package_data={'cosmo': ['pytest.ini']}
)
| Add entry point for monitor runner. | Add entry point for monitor runner.
| Python | bsd-3-clause | justincely/cos_monitoring | from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
Add entry point for monitor runner. | from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
],
entry_points={
'console_scripts':
['run_monitors=cosmo.run_monitors:runner']
},
package_data={'cosmo': ['pytest.ini']}
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
<commit_msg>Add entry point for monitor runner.<commit_after> | from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
],
entry_points={
'console_scripts':
['run_monitors=cosmo.run_monitors:runner']
},
package_data={'cosmo': ['pytest.ini']}
)
| from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
Add entry point for monitor runner.from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
],
entry_points={
'console_scripts':
['run_monitors=cosmo.run_monitors:runner']
},
package_data={'cosmo': ['pytest.ini']}
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
<commit_msg>Add entry point for monitor runner.<commit_after>from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'calcos',
'crds',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
],
entry_points={
'console_scripts':
['run_monitors=cosmo.run_monitors:runner']
},
package_data={'cosmo': ['pytest.ini']}
)
|
c4f2946f67784c24c2364821a2ba93773ac96e88 | setup.py | setup.py | try:
from setuptools import setup
except:
from distutils.core import setup
import forms
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
)
| try:
from setuptools import setup
except:
from distutils.core import setup
from distutils.command import install
import forms
for scheme in install.INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
data_files=[
['forms', ['forms/forms.css']],
]
)
| Include forms.css in the package | Include forms.css in the package | Python | mit | emgee/formal,emgee/formal,emgee/formal | try:
from setuptools import setup
except:
from distutils.core import setup
import forms
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
)
Include forms.css in the package | try:
from setuptools import setup
except:
from distutils.core import setup
from distutils.command import install
import forms
for scheme in install.INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
data_files=[
['forms', ['forms/forms.css']],
]
)
| <commit_before>try:
from setuptools import setup
except:
from distutils.core import setup
import forms
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
)
<commit_msg>Include forms.css in the package<commit_after> | try:
from setuptools import setup
except:
from distutils.core import setup
from distutils.command import install
import forms
for scheme in install.INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
data_files=[
['forms', ['forms/forms.css']],
]
)
| try:
from setuptools import setup
except:
from distutils.core import setup
import forms
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
)
Include forms.css in the packagetry:
from setuptools import setup
except:
from distutils.core import setup
from distutils.command import install
import forms
for scheme in install.INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
data_files=[
['forms', ['forms/forms.css']],
]
)
| <commit_before>try:
from setuptools import setup
except:
from distutils.core import setup
import forms
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
)
<commit_msg>Include forms.css in the package<commit_after>try:
from setuptools import setup
except:
from distutils.core import setup
from distutils.command import install
import forms
for scheme in install.INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
data_files=[
['forms', ['forms/forms.css']],
]
)
|
bd255f89c5958e73fcb715a78950e7065facb7cf | setup.py | setup.py | """ Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
],
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
| """ Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
]
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
| Fix installation on Python < 2.7 | Fix installation on Python < 2.7
When the install_requires array was moved, a erroneous trailing comma
was left, turning it into a tuple which cannot be appended to. Removing
the comma allows installation on Python 2.6 which is what CloudFormation
uses. | Python | apache-2.0 | sebdah/dynamic-dynamodb,acquiachrisnagy/dynamic-dynamodb,Lead-iD/dynamic-dynamodb,qqshfox/dynamic-dynamodb,Spokeo/dynamic-dynamodb | """ Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
],
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
Fix installation on Python < 2.7
When the install_requires array was moved, a erroneous trailing comma
was left, turning it into a tuple which cannot be appended to. Removing
the comma allows installation on Python 2.6 which is what CloudFormation
uses. | """ Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
]
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
| <commit_before>""" Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
],
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
<commit_msg>Fix installation on Python < 2.7
When the install_requires array was moved, a erroneous trailing comma
was left, turning it into a tuple which cannot be appended to. Removing
the comma allows installation on Python 2.6 which is what CloudFormation
uses.<commit_after> | """ Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
]
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
| """ Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
],
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
Fix installation on Python < 2.7
When the install_requires array was moved, a erroneous trailing comma
was left, turning it into a tuple which cannot be appended to. Removing
the comma allows installation on Python 2.6 which is what CloudFormation
uses.""" Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
]
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
| <commit_before>""" Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
],
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
<commit_msg>Fix installation on Python < 2.7
When the install_requires array was moved, a erroneous trailing comma
was left, turning it into a tuple which cannot be appended to. Removing
the comma allows installation on Python 2.6 which is what CloudFormation
uses.<commit_after>""" Setup script for PyPI """
import os
import sys
from setuptools import setup
from ConfigParser import SafeConfigParser
settings = SafeConfigParser()
settings.read(os.path.realpath('dynamic_dynamodb/dynamic-dynamodb.conf'))
def return_requires():
install_requires = [
'boto >= 2.29.1',
'requests >= 0.14.1',
'logutils >= 0.3.3',
'retrying >= 1.3.3'
]
if sys.version_info < (2, 7):
install_requires.append('ordereddict >= 1.1')
return install_requires
setup(
name='dynamic-dynamodb',
version=settings.get('general', 'version'),
license='Apache License, Version 2.0',
description='Automatic provisioning for AWS DynamoDB tables',
author='Sebastian Dahlgren',
author_email='sebastian.dahlgren@gmail.com',
url='http://sebdah.github.com/dynamic-dynamodb/',
keywords="dynamodb aws provisioning amazon web services",
platforms=['Any'],
packages=['dynamic_dynamodb'],
scripts=['dynamic-dynamodb'],
include_package_data=True,
zip_safe=False,
install_requires=return_requires(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python'
]
)
|
9802a48572c939d80ba226c9d2c903825938653f | setup.py | setup.py | from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<3',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
| from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<=3.0.0',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
| Add support for incuna_mail 3.0.0 | Add support for incuna_mail 3.0.0
| Python | bsd-2-clause | incuna/django-user-management,incuna/django-user-management | from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<3',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
Add support for incuna_mail 3.0.0 | from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<=3.0.0',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
| <commit_before>from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<3',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
<commit_msg>Add support for incuna_mail 3.0.0<commit_after> | from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<=3.0.0',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
| from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<3',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
Add support for incuna_mail 3.0.0from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<=3.0.0',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
| <commit_before>from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<3',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
<commit_msg>Add support for incuna_mail 3.0.0<commit_after>from setuptools import find_packages, setup
version = '6.0.0'
install_requires = (
'djangorestframework>=2.4.4,<3',
'incuna_mail>=2.0.0,<=3.0.0',
)
extras_require = {
'avatar': [
'django-imagekit>=3.2',
],
'utils': [
'raven>=5.1.1',
],
}
setup(
name='django-user-management',
packages=find_packages(),
include_package_data=True,
version=version,
description='User management model mixins and api views.',
long_description='',
keywords='django rest framework user management api',
author='Incuna',
author_email='admin@incuna.com',
url='https://github.com/incuna/django-user-management/',
install_requires=install_requires,
extras_require=extras_require,
zip_safe=False,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
'Topic :: Utilities',
],
)
|
8a3aedad773a0899ceb85377faab658bc0dfdcb4 | setup.py | setup.py | from setuptools import setup, find_packages
VERSION = '1.1.4'
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open("README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
| from setuptools import setup, find_packages
from os.path import dirname, abspath
HERE = abspath(dirname(__file__))
VERSION = open(HERE + '/puresnmp/version.txt').read().strip()
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open(HERE + "/README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
| Revert "Another attempt to fix the RTD build." | Revert "Another attempt to fix the RTD build."
This reverts commit 43807c085493962ca0f79105b64b3be8ddc6fc39.
| Python | mit | exhuma/puresnmp,exhuma/puresnmp | from setuptools import setup, find_packages
VERSION = '1.1.4'
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open("README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
Revert "Another attempt to fix the RTD build."
This reverts commit 43807c085493962ca0f79105b64b3be8ddc6fc39. | from setuptools import setup, find_packages
from os.path import dirname, abspath
HERE = abspath(dirname(__file__))
VERSION = open(HERE + '/puresnmp/version.txt').read().strip()
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open(HERE + "/README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
| <commit_before>from setuptools import setup, find_packages
VERSION = '1.1.4'
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open("README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
<commit_msg>Revert "Another attempt to fix the RTD build."
This reverts commit 43807c085493962ca0f79105b64b3be8ddc6fc39.<commit_after> | from setuptools import setup, find_packages
from os.path import dirname, abspath
HERE = abspath(dirname(__file__))
VERSION = open(HERE + '/puresnmp/version.txt').read().strip()
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open(HERE + "/README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
| from setuptools import setup, find_packages
VERSION = '1.1.4'
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open("README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
Revert "Another attempt to fix the RTD build."
This reverts commit 43807c085493962ca0f79105b64b3be8ddc6fc39.from setuptools import setup, find_packages
from os.path import dirname, abspath
HERE = abspath(dirname(__file__))
VERSION = open(HERE + '/puresnmp/version.txt').read().strip()
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open(HERE + "/README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
| <commit_before>from setuptools import setup, find_packages
VERSION = '1.1.4'
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open("README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
<commit_msg>Revert "Another attempt to fix the RTD build."
This reverts commit 43807c085493962ca0f79105b64b3be8ddc6fc39.<commit_after>from setuptools import setup, find_packages
from os.path import dirname, abspath
HERE = abspath(dirname(__file__))
VERSION = open(HERE + '/puresnmp/version.txt').read().strip()
setup(
name="puresnmp",
version=VERSION,
description="Pure Python SNMP implementation",
long_description=open(HERE + "/README.rst").read(),
author="Michel Albert",
author_email="michel@albert.lu",
provides=['puresnmp'],
license="MIT",
include_package_data=True,
install_requires=[
'typing',
],
extras_require={
'dev': [],
'test': ['pytest-xdist', 'pytest', 'pytest-coverage']
},
packages=find_packages(exclude=["tests.*", "tests", "docs"]),
url="https://github.com/exhuma/puresnmp",
keywords="networking snmp",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: System :: Networking',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
]
)
|
26c3f786064923f0ac099de645289ff75014b354 | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
sys.exit()
setup(version=VERSION)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
print("You probably want to also tag the version now:")
print(" git tag -a {0} -m 'version {0}'".format(VERSION))
print(" git push --tags")
sys.exit()
setup(version=VERSION)
| Revert note to tag the version after publish | Revert note to tag the version after publish
| Python | mit | igordejanovic/textX,igordejanovic/textX,igordejanovic/textX | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
sys.exit()
setup(version=VERSION)
Revert note to tag the version after publish | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
print("You probably want to also tag the version now:")
print(" git tag -a {0} -m 'version {0}'".format(VERSION))
print(" git push --tags")
sys.exit()
setup(version=VERSION)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
sys.exit()
setup(version=VERSION)
<commit_msg>Revert note to tag the version after publish<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
print("You probably want to also tag the version now:")
print(" git tag -a {0} -m 'version {0}'".format(VERSION))
print(" git push --tags")
sys.exit()
setup(version=VERSION)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
sys.exit()
setup(version=VERSION)
Revert note to tag the version after publish#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
print("You probably want to also tag the version now:")
print(" git tag -a {0} -m 'version {0}'".format(VERSION))
print(" git push --tags")
sys.exit()
setup(version=VERSION)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
sys.exit()
setup(version=VERSION)
<commit_msg>Revert note to tag the version after publish<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
VERSIONFILE = os.path.join(this_dir, "textx", "__init__.py")
VERSION = None
for line in open(VERSIONFILE, "r").readlines():
if line.startswith('__version__'):
VERSION = line.split('"')[1]
if not VERSION:
raise RuntimeError('No version defined in textx.__init__.py')
if sys.argv[-1].startswith('publish'):
if os.system("pip list | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip list | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
if sys.argv[-1] == 'publishtest':
os.system("twine upload -r test dist/*")
else:
os.system("twine upload dist/*")
print("You probably want to also tag the version now:")
print(" git tag -a {0} -m 'version {0}'".format(VERSION))
print(" git push --tags")
sys.exit()
setup(version=VERSION)
|
6f965f0014922560de20f725c76ca4060856cb77 | setup.py | setup.py | #!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import bottle
setup(name='Bottle-Async',
version=bottle.__version__,
description='A fork of Bottle to add support for asyncio.',
long_description=bottle.__doc__,
author=bottle.__author__ + ", Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| #!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='Bottle-Async',
version="0.13-dev",
description='A fork of Bottle to add support for asyncio.',
long_description="",
author="Marcel Hellkamp, Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| Fix bottle import that won't work with deps | Fix bottle import that won't work with deps
| Python | mit | mrdon/bottle,mrdon/bottle | #!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import bottle
setup(name='Bottle-Async',
version=bottle.__version__,
description='A fork of Bottle to add support for asyncio.',
long_description=bottle.__doc__,
author=bottle.__author__ + ", Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Fix bottle import that won't work with deps | #!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='Bottle-Async',
version="0.13-dev",
description='A fork of Bottle to add support for asyncio.',
long_description="",
author="Marcel Hellkamp, Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| <commit_before>#!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import bottle
setup(name='Bottle-Async',
version=bottle.__version__,
description='A fork of Bottle to add support for asyncio.',
long_description=bottle.__doc__,
author=bottle.__author__ + ", Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Fix bottle import that won't work with deps<commit_after> | #!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='Bottle-Async',
version="0.13-dev",
description='A fork of Bottle to add support for asyncio.',
long_description="",
author="Marcel Hellkamp, Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| #!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import bottle
setup(name='Bottle-Async',
version=bottle.__version__,
description='A fork of Bottle to add support for asyncio.',
long_description=bottle.__doc__,
author=bottle.__author__ + ", Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Fix bottle import that won't work with deps#!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='Bottle-Async',
version="0.13-dev",
description='A fork of Bottle to add support for asyncio.',
long_description="",
author="Marcel Hellkamp, Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
| <commit_before>#!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import bottle
setup(name='Bottle-Async',
version=bottle.__version__,
description='A fork of Bottle to add support for asyncio.',
long_description=bottle.__doc__,
author=bottle.__author__ + ", Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Fix bottle import that won't work with deps<commit_after>#!/usr/bin/env python
import sys
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='Bottle-Async',
version="0.13-dev",
description='A fork of Bottle to add support for asyncio.',
long_description="",
author="Marcel Hellkamp, Don Brown",
author_email='mrdon@twdata.org',
url='https://github.com/mrdon/bottle',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
install_requires=[
'aiohttp'
],
platforms = 'any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
e9fe645af28bd93a6ee2b38184254c8295b70d3d | sn.py | sn.py | import tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Check that the desired SN is in the HDF5 file
if self.name in h5file.list_nodes('/sn')._v_name:
print "Yay!"
else:
print "Boo!"
| import tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Get the desired node from the HDF5 file
sn_node = h5file.get_node('/sn', self.name)
| Add method to check for the SN in the HDF5 file | Add method to check for the SN in the HDF5 file
| Python | mit | JALusk/SuperBoL,JALusk/SNoBoL,JALusk/SNoBoL | import tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Check that the desired SN is in the HDF5 file
if self.name in h5file.list_nodes('/sn')._v_name:
print "Yay!"
else:
print "Boo!"
Add method to check for the SN in the HDF5 file | import tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Get the desired node from the HDF5 file
sn_node = h5file.get_node('/sn', self.name)
| <commit_before>import tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Check that the desired SN is in the HDF5 file
if self.name in h5file.list_nodes('/sn')._v_name:
print "Yay!"
else:
print "Boo!"
<commit_msg>Add method to check for the SN in the HDF5 file<commit_after> | import tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Get the desired node from the HDF5 file
sn_node = h5file.get_node('/sn', self.name)
| import tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Check that the desired SN is in the HDF5 file
if self.name in h5file.list_nodes('/sn')._v_name:
print "Yay!"
else:
print "Boo!"
Add method to check for the SN in the HDF5 fileimport tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Get the desired node from the HDF5 file
sn_node = h5file.get_node('/sn', self.name)
| <commit_before>import tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Check that the desired SN is in the HDF5 file
if self.name in h5file.list_nodes('/sn')._v_name:
print "Yay!"
else:
print "Boo!"
<commit_msg>Add method to check for the SN in the HDF5 file<commit_after>import tables as tb
hdf5_filename = 'hdf5/sn_data.h5'
class SN(object):
"""A supernova is the explosion that ends the life of a star
The SN needs to be conatained within the HDF5 database before it is used
by SNoBoL. Once there, simply create a supernova by calling the constructor
with the name of the SN as a string of the form "sn[YEAR][Letter(s)]"
For example:
sn1987A = SN('sn1987a')
sn1999em = SN('sn1999em')
Attributes
----------
name : Name of the supernova, "SN" followed by the year of first observation
along with a letter designating the order of observation in that
year. "SN1987A" was the first SN observed in 1987. "SN2000cb" was the
eightieth SN observed in 2000.
"""
def __init__(self, name):
"""Initializes the SN with supplied value for [name]"""
self.name = name
# Load SN data from HDF5 file
self.read_hdf5()
def read_hdf5(self):
h5file = tb.open_file(hdf5_filename)
# Get the desired node from the HDF5 file
sn_node = h5file.get_node('/sn', self.name)
|
0524a403bb2d4d26f28f535bcadcfc3fdd0a9484 | hr_attendance_calendar/__manifest__.py | hr_attendance_calendar/__manifest__.py | # -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_employee',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
| # -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
| FIX - remove inconsistent dependence | FIX - remove inconsistent dependence
| Python | agpl-3.0 | maxime-beck/compassion-modules,ecino/compassion-modules,eicher31/compassion-modules,maxime-beck/compassion-modules,CompassionCH/compassion-modules,eicher31/compassion-modules,maxime-beck/compassion-modules,ecino/compassion-modules,CompassionCH/compassion-modules,eicher31/compassion-modules,eicher31/compassion-modules,CompassionCH/compassion-modules,ecino/compassion-modules,eicher31/compassion-modules,maxime-beck/compassion-modules,ecino/compassion-modules,ecino/compassion-modules,CompassionCH/compassion-modules | # -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_employee',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
FIX - remove inconsistent dependence | # -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
| <commit_before># -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_employee',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
<commit_msg>FIX - remove inconsistent dependence<commit_after> | # -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
| # -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_employee',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
FIX - remove inconsistent dependence# -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
| <commit_before># -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_employee',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
<commit_msg>FIX - remove inconsistent dependence<commit_after># -*- coding: utf-8 -*-
# © 2016 Coninckx David (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Attendance - Calendar',
'summary': 'Compute extra hours based on attendances',
'category': 'Human Resources',
'author': "CompassionCH, Open Net Sàrl",
'depends': [
'hr',
'hr_attendance',
'hr_holidays',
'hr_public_holidays'
],
'version': '10.0.1.0.0',
'auto_install': False,
'website': 'http://open-net.ch',
'license': 'AGPL-3',
'images': [],
'data': [
'security/ir.model.access.csv',
'views/hr_attendance_calendar_view.xml',
'views/hr_attendance_day_view.xml',
'views/hr_attendance_view.xml',
'views/hr_employee.xml',
'views/hr_holidays_status_views.xml',
'wizard/create_hr_attendance_day_view.xml',
'data/attendance_computation_cron.xml'
],
'installable': True
}
|
3fbec65f39295eb45211e33f0452b1076541cbc1 | etl/csv/__init__.py | etl/csv/__init__.py | import overview
import personnel
import match
import events
import statistics
CSV_ETL_CLASSES = {
'Overview': {
'Competitions': overview.CompetitionIngest,
'Clubs': overview.ClubIngest,
'Venues': overview.VenueIngest
},
'Personnel': {
'Players': personnel.PlayerIngest,
'Managers': personnel.ManagerIngest,
'Referees': personnel.RefereeIngest
},
'Match': {
'Matches': match.MatchIngest,
'Lineups': match.MatchLineupIngest,
'Goals': events.GoalIngest,
'Penalties': events.PenaltyIngest,
'Bookables': events.BookableIngest,
'Substitutions': events.SubstitutionIngest,
'PlayerStats': [
statistics.AssistsIngest,
statistics.ClearancesIngest,
statistics.CornerCrossesIngest,
statistics.CornersIngest,
statistics.CrossesIngest,
statistics.DefensivesIngest,
statistics.DisciplineIngest,
statistics.DuelsIngest,
statistics.FoulWinsIngest,
statistics.FreeKicksIngest,
statistics.GKActionsIngest,
statistics.GKAllowedGoalsIngest,
statistics.GKAllowedShotsIngest,
statistics.GKSavesIngest,
statistics.GoalBodyPartsIngest,
statistics.GoalLineClearancesIngest,
statistics.GoalLocationsIngest,
statistics.GoalTotalsIngest,
statistics.ImportantPlaysIngest,
statistics.MatchStatIngest,
statistics.PassDirectionsIngest,
statistics.PassLengthsIngest,
statistics.PassLocationsIngest,
statistics.PassTotalsIngest,
statistics.PenaltyActionsIngest,
statistics.ShotBlocksIngest,
statistics.ShotBodyPartsIngest,
statistics.ShotLocationsIngest,
statistics.ShotPlaysIngest,
statistics.TacklesIngest,
statistics.ThrowinsIngest,
statistics.TouchesIngest,
statistics.TouchLocationsIngest
]
}
}
| Create dictionary to map ETL data files to ingestion classes | Create dictionary to map ETL data files to ingestion classes
| Python | mit | soccermetrics/marcotti | Create dictionary to map ETL data files to ingestion classes | import overview
import personnel
import match
import events
import statistics
CSV_ETL_CLASSES = {
'Overview': {
'Competitions': overview.CompetitionIngest,
'Clubs': overview.ClubIngest,
'Venues': overview.VenueIngest
},
'Personnel': {
'Players': personnel.PlayerIngest,
'Managers': personnel.ManagerIngest,
'Referees': personnel.RefereeIngest
},
'Match': {
'Matches': match.MatchIngest,
'Lineups': match.MatchLineupIngest,
'Goals': events.GoalIngest,
'Penalties': events.PenaltyIngest,
'Bookables': events.BookableIngest,
'Substitutions': events.SubstitutionIngest,
'PlayerStats': [
statistics.AssistsIngest,
statistics.ClearancesIngest,
statistics.CornerCrossesIngest,
statistics.CornersIngest,
statistics.CrossesIngest,
statistics.DefensivesIngest,
statistics.DisciplineIngest,
statistics.DuelsIngest,
statistics.FoulWinsIngest,
statistics.FreeKicksIngest,
statistics.GKActionsIngest,
statistics.GKAllowedGoalsIngest,
statistics.GKAllowedShotsIngest,
statistics.GKSavesIngest,
statistics.GoalBodyPartsIngest,
statistics.GoalLineClearancesIngest,
statistics.GoalLocationsIngest,
statistics.GoalTotalsIngest,
statistics.ImportantPlaysIngest,
statistics.MatchStatIngest,
statistics.PassDirectionsIngest,
statistics.PassLengthsIngest,
statistics.PassLocationsIngest,
statistics.PassTotalsIngest,
statistics.PenaltyActionsIngest,
statistics.ShotBlocksIngest,
statistics.ShotBodyPartsIngest,
statistics.ShotLocationsIngest,
statistics.ShotPlaysIngest,
statistics.TacklesIngest,
statistics.ThrowinsIngest,
statistics.TouchesIngest,
statistics.TouchLocationsIngest
]
}
}
| <commit_before><commit_msg>Create dictionary to map ETL data files to ingestion classes<commit_after> | import overview
import personnel
import match
import events
import statistics
CSV_ETL_CLASSES = {
'Overview': {
'Competitions': overview.CompetitionIngest,
'Clubs': overview.ClubIngest,
'Venues': overview.VenueIngest
},
'Personnel': {
'Players': personnel.PlayerIngest,
'Managers': personnel.ManagerIngest,
'Referees': personnel.RefereeIngest
},
'Match': {
'Matches': match.MatchIngest,
'Lineups': match.MatchLineupIngest,
'Goals': events.GoalIngest,
'Penalties': events.PenaltyIngest,
'Bookables': events.BookableIngest,
'Substitutions': events.SubstitutionIngest,
'PlayerStats': [
statistics.AssistsIngest,
statistics.ClearancesIngest,
statistics.CornerCrossesIngest,
statistics.CornersIngest,
statistics.CrossesIngest,
statistics.DefensivesIngest,
statistics.DisciplineIngest,
statistics.DuelsIngest,
statistics.FoulWinsIngest,
statistics.FreeKicksIngest,
statistics.GKActionsIngest,
statistics.GKAllowedGoalsIngest,
statistics.GKAllowedShotsIngest,
statistics.GKSavesIngest,
statistics.GoalBodyPartsIngest,
statistics.GoalLineClearancesIngest,
statistics.GoalLocationsIngest,
statistics.GoalTotalsIngest,
statistics.ImportantPlaysIngest,
statistics.MatchStatIngest,
statistics.PassDirectionsIngest,
statistics.PassLengthsIngest,
statistics.PassLocationsIngest,
statistics.PassTotalsIngest,
statistics.PenaltyActionsIngest,
statistics.ShotBlocksIngest,
statistics.ShotBodyPartsIngest,
statistics.ShotLocationsIngest,
statistics.ShotPlaysIngest,
statistics.TacklesIngest,
statistics.ThrowinsIngest,
statistics.TouchesIngest,
statistics.TouchLocationsIngest
]
}
}
| Create dictionary to map ETL data files to ingestion classesimport overview
import personnel
import match
import events
import statistics
CSV_ETL_CLASSES = {
'Overview': {
'Competitions': overview.CompetitionIngest,
'Clubs': overview.ClubIngest,
'Venues': overview.VenueIngest
},
'Personnel': {
'Players': personnel.PlayerIngest,
'Managers': personnel.ManagerIngest,
'Referees': personnel.RefereeIngest
},
'Match': {
'Matches': match.MatchIngest,
'Lineups': match.MatchLineupIngest,
'Goals': events.GoalIngest,
'Penalties': events.PenaltyIngest,
'Bookables': events.BookableIngest,
'Substitutions': events.SubstitutionIngest,
'PlayerStats': [
statistics.AssistsIngest,
statistics.ClearancesIngest,
statistics.CornerCrossesIngest,
statistics.CornersIngest,
statistics.CrossesIngest,
statistics.DefensivesIngest,
statistics.DisciplineIngest,
statistics.DuelsIngest,
statistics.FoulWinsIngest,
statistics.FreeKicksIngest,
statistics.GKActionsIngest,
statistics.GKAllowedGoalsIngest,
statistics.GKAllowedShotsIngest,
statistics.GKSavesIngest,
statistics.GoalBodyPartsIngest,
statistics.GoalLineClearancesIngest,
statistics.GoalLocationsIngest,
statistics.GoalTotalsIngest,
statistics.ImportantPlaysIngest,
statistics.MatchStatIngest,
statistics.PassDirectionsIngest,
statistics.PassLengthsIngest,
statistics.PassLocationsIngest,
statistics.PassTotalsIngest,
statistics.PenaltyActionsIngest,
statistics.ShotBlocksIngest,
statistics.ShotBodyPartsIngest,
statistics.ShotLocationsIngest,
statistics.ShotPlaysIngest,
statistics.TacklesIngest,
statistics.ThrowinsIngest,
statistics.TouchesIngest,
statistics.TouchLocationsIngest
]
}
}
| <commit_before><commit_msg>Create dictionary to map ETL data files to ingestion classes<commit_after>import overview
import personnel
import match
import events
import statistics
CSV_ETL_CLASSES = {
'Overview': {
'Competitions': overview.CompetitionIngest,
'Clubs': overview.ClubIngest,
'Venues': overview.VenueIngest
},
'Personnel': {
'Players': personnel.PlayerIngest,
'Managers': personnel.ManagerIngest,
'Referees': personnel.RefereeIngest
},
'Match': {
'Matches': match.MatchIngest,
'Lineups': match.MatchLineupIngest,
'Goals': events.GoalIngest,
'Penalties': events.PenaltyIngest,
'Bookables': events.BookableIngest,
'Substitutions': events.SubstitutionIngest,
'PlayerStats': [
statistics.AssistsIngest,
statistics.ClearancesIngest,
statistics.CornerCrossesIngest,
statistics.CornersIngest,
statistics.CrossesIngest,
statistics.DefensivesIngest,
statistics.DisciplineIngest,
statistics.DuelsIngest,
statistics.FoulWinsIngest,
statistics.FreeKicksIngest,
statistics.GKActionsIngest,
statistics.GKAllowedGoalsIngest,
statistics.GKAllowedShotsIngest,
statistics.GKSavesIngest,
statistics.GoalBodyPartsIngest,
statistics.GoalLineClearancesIngest,
statistics.GoalLocationsIngest,
statistics.GoalTotalsIngest,
statistics.ImportantPlaysIngest,
statistics.MatchStatIngest,
statistics.PassDirectionsIngest,
statistics.PassLengthsIngest,
statistics.PassLocationsIngest,
statistics.PassTotalsIngest,
statistics.PenaltyActionsIngest,
statistics.ShotBlocksIngest,
statistics.ShotBodyPartsIngest,
statistics.ShotLocationsIngest,
statistics.ShotPlaysIngest,
statistics.TacklesIngest,
statistics.ThrowinsIngest,
statistics.TouchesIngest,
statistics.TouchLocationsIngest
]
}
}
| |
a0907ff742c81b676f602d1e17d820152f95d22e | django_docs/urls.py | django_docs/urls.py | from django.conf.urls import patterns, url, include
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
) + docs_urlpatterns
| from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
| Add validation URL for Google Webmaster Tools. | Add validation URL for Google Webmaster Tools.
| Python | bsd-3-clause | hassanabidpk/djangoproject.com,hassanabidpk/djangoproject.com,gnarf/djangoproject.com,rmoorman/djangoproject.com,vxvinh1511/djangoproject.com,rmoorman/djangoproject.com,nanuxbe/django,vxvinh1511/djangoproject.com,relekang/djangoproject.com,alawnchen/djangoproject.com,rmoorman/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,relekang/djangoproject.com,gnarf/djangoproject.com,vxvinh1511/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com,nanuxbe/django,hassanabidpk/djangoproject.com,khkaminska/djangoproject.com,gnarf/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,nanuxbe/django,alawnchen/djangoproject.com,rmoorman/djangoproject.com,vxvinh1511/djangoproject.com,gnarf/djangoproject.com,django/djangoproject.com,nanuxbe/django,xavierdutreilh/djangoproject.com,django/djangoproject.com | from django.conf.urls import patterns, url, include
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
) + docs_urlpatterns
Add validation URL for Google Webmaster Tools. | from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
| <commit_before>from django.conf.urls import patterns, url, include
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
) + docs_urlpatterns
<commit_msg>Add validation URL for Google Webmaster Tools.<commit_after> | from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
| from django.conf.urls import patterns, url, include
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
) + docs_urlpatterns
Add validation URL for Google Webmaster Tools.from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
| <commit_before>from django.conf.urls import patterns, url, include
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
) + docs_urlpatterns
<commit_msg>Add validation URL for Google Webmaster Tools.<commit_after>from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
|
d1b7b629218830e4b7c584fc1c0804a3b9ee553a | src/vault.py | src/vault.py | from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
| from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
| Add warning about root token necessity | Add warning about root token necessity
| Python | mit | elifesciences/builder,elifesciences/builder | from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
Add warning about root token necessity | from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
| <commit_before>from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
<commit_msg>Add warning about root token necessity<commit_after> | from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
| from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
Add warning about root token necessityfrom fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
| <commit_before>from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
<commit_msg>Add warning about root token necessity<commit_after>from fabric.api import task, local
from buildercore import project
import utils
import sys
import logging
LOG = logging.getLogger(__name__)
def vault_addr():
defaults, _ = project.raw_project_map()
return defaults['aws']['vault']['address']
def vault_policy():
return 'builder-user'
@task
def login():
cmd = "VAULT_ADDR=%s vault login" % vault_addr()
local(cmd)
@task
def logout():
cmd = "rm -f ~/.vault-token"
local(cmd)
@task
def token_lookup(token):
cmd = "VAULT_ADDR=%s VAULT_TOKEN=%s vault token lookup" % (vault_addr(), token)
local(cmd)
@task
def token_create():
print("Warning: you should be authenticated with a root token to effectively create a new token here")
token = utils.get_input('token display name: ')
if not token or not token.strip():
print("a token display name is required")
sys.exit(1)
cmd = "VAULT_ADDR=%s vault token create -policy=%s -display-name=%s" % (vault_addr(), vault_policy(), token)
local(cmd)
@task
def token_revoke(token):
cmd = "VAULT_ADDR=%s vault token revoke %s" % (vault_addr(), token)
local(cmd)
|
7cc3d10b829b51825e6f670fb0f3d29b54a8ead1 | trustyuri/rdf/TransformRdf.py | trustyuri/rdf/TransformRdf.py | import sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args) | import sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args)
| Add newline to end of file | Add newline to end of file
| Python | mit | trustyuri/trustyuri-python,trustyuri/trustyuri-python | import sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args)Add newline to end of file | import sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args)
| <commit_before>import sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args)<commit_msg>Add newline to end of file<commit_after> | import sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args)
| import sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args)Add newline to end of fileimport sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args)
| <commit_before>import sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args)<commit_msg>Add newline to end of file<commit_after>import sys, logging
from rdflib.graph import ConjunctiveGraph
from trustyuri.rdf import RdfUtils, RdfTransformer
from rdflib.term import URIRef
import os
def transform(args):
filename = args[0]
baseuristr = args[1]
with open(filename, "r") as f:
rdfFormat = RdfUtils.get_format(filename)
cg = ConjunctiveGraph()
cg.parse(data=f.read(), format=rdfFormat)
baseuri = URIRef(baseuristr)
outdir = os.path.abspath(os.path.join(str(file), os.pardir))
RdfTransformer.transform_to_file(cg, baseuri, outdir, filename)
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
args = sys.argv
args.pop(0)
transform(args)
|
a39b7b2b9b0c9179d3aedcc29286cdcebf568d54 | tests.py | tests.py | #!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def test_BasicVerification(self):
ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
assert ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def setUp(self):
super(ECC_Verify_Tests, self).setUp()
self.ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
def test_BasicVerification(self):
assert self.ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
def test_BadVerification(self):
assert self.ecc.verify(DEFAULT_DATA, "FAIL") == False , ('Verified on a bad sig',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
| Implement a bad sig test | Implement a bad sig test
| Python | lgpl-2.1 | rtyler/PyECC,slideinc/PyECC,rtyler/PyECC,slideinc/PyECC | #!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def test_BasicVerification(self):
ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
assert ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
Implement a bad sig test | #!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def setUp(self):
super(ECC_Verify_Tests, self).setUp()
self.ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
def test_BasicVerification(self):
assert self.ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
def test_BadVerification(self):
assert self.ecc.verify(DEFAULT_DATA, "FAIL") == False , ('Verified on a bad sig',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def test_BasicVerification(self):
ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
assert ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
<commit_msg>Implement a bad sig test<commit_after> | #!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def setUp(self):
super(ECC_Verify_Tests, self).setUp()
self.ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
def test_BasicVerification(self):
assert self.ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
def test_BadVerification(self):
assert self.ecc.verify(DEFAULT_DATA, "FAIL") == False , ('Verified on a bad sig',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def test_BasicVerification(self):
ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
assert ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
Implement a bad sig test#!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def setUp(self):
super(ECC_Verify_Tests, self).setUp()
self.ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
def test_BasicVerification(self):
assert self.ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
def test_BadVerification(self):
assert self.ecc.verify(DEFAULT_DATA, "FAIL") == False , ('Verified on a bad sig',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def test_BasicVerification(self):
ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
assert ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
<commit_msg>Implement a bad sig test<commit_after>#!/usr/bin/env python
'''
Copyright 2009 Slide, Inc.
'''
import unittest
import pyecc
DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'
class ECC_Verify_Tests(unittest.TestCase):
def setUp(self):
super(ECC_Verify_Tests, self).setUp()
self.ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
def test_BasicVerification(self):
assert self.ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
def test_BadVerification(self):
assert self.ecc.verify(DEFAULT_DATA, "FAIL") == False , ('Verified on a bad sig',
DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
if __name__ == '__main__':
unittest.main()
|
109b753c807dae30ee736a6f071a058fa8b68d92 | tests/scoring_engine/web/views/test_services.py | tests/scoring_engine/web/views/test_services.py | from tests.scoring_engine.web.web_test import WebTest
class TestServices(WebTest):
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
| from tests.scoring_engine.web.web_test import WebTest
from tests.scoring_engine.helpers import generate_sample_model_tree
class TestServices(WebTest):
def set_team_color(self, team, color):
team.color = color
self.session.add(team)
self.session.commit()
def set_blue_team(self, team):
self.set_team_color(team, 'Blue')
def set_white_team(self, team):
self.set_team_color(team, 'White')
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
def test_normal_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 200
def test_unauthorized_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_white_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 302
def test_normal_service_id(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 200
def test_unauthorized_service_id(self):
self.create_default_user()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 302 | Update tests for services view | Update tests for services view
| Python | mit | pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine | from tests.scoring_engine.web.web_test import WebTest
class TestServices(WebTest):
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
Update tests for services view | from tests.scoring_engine.web.web_test import WebTest
from tests.scoring_engine.helpers import generate_sample_model_tree
class TestServices(WebTest):
def set_team_color(self, team, color):
team.color = color
self.session.add(team)
self.session.commit()
def set_blue_team(self, team):
self.set_team_color(team, 'Blue')
def set_white_team(self, team):
self.set_team_color(team, 'White')
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
def test_normal_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 200
def test_unauthorized_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_white_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 302
def test_normal_service_id(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 200
def test_unauthorized_service_id(self):
self.create_default_user()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 302 | <commit_before>from tests.scoring_engine.web.web_test import WebTest
class TestServices(WebTest):
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
<commit_msg>Update tests for services view<commit_after> | from tests.scoring_engine.web.web_test import WebTest
from tests.scoring_engine.helpers import generate_sample_model_tree
class TestServices(WebTest):
def set_team_color(self, team, color):
team.color = color
self.session.add(team)
self.session.commit()
def set_blue_team(self, team):
self.set_team_color(team, 'Blue')
def set_white_team(self, team):
self.set_team_color(team, 'White')
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
def test_normal_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 200
def test_unauthorized_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_white_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 302
def test_normal_service_id(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 200
def test_unauthorized_service_id(self):
self.create_default_user()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 302 | from tests.scoring_engine.web.web_test import WebTest
class TestServices(WebTest):
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
Update tests for services viewfrom tests.scoring_engine.web.web_test import WebTest
from tests.scoring_engine.helpers import generate_sample_model_tree
class TestServices(WebTest):
def set_team_color(self, team, color):
team.color = color
self.session.add(team)
self.session.commit()
def set_blue_team(self, team):
self.set_team_color(team, 'Blue')
def set_white_team(self, team):
self.set_team_color(team, 'White')
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
def test_normal_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 200
def test_unauthorized_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_white_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 302
def test_normal_service_id(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 200
def test_unauthorized_service_id(self):
self.create_default_user()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 302 | <commit_before>from tests.scoring_engine.web.web_test import WebTest
class TestServices(WebTest):
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
<commit_msg>Update tests for services view<commit_after>from tests.scoring_engine.web.web_test import WebTest
from tests.scoring_engine.helpers import generate_sample_model_tree
class TestServices(WebTest):
def set_team_color(self, team, color):
team.color = color
self.session.add(team)
self.session.commit()
def set_blue_team(self, team):
self.set_team_color(team, 'Blue')
def set_white_team(self, team):
self.set_team_color(team, 'White')
def test_auth_required_services(self):
self.verify_auth_required('/services')
def test_auth_required_service_id(self):
self.verify_auth_required('/service/1')
def test_normal_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 200
def test_unauthorized_services(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_white_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/services')
assert resp.status_code == 302
def test_normal_service_id(self):
user = self.create_default_user()
service = generate_sample_model_tree('Service', self.session)
self.set_blue_team(user.team)
service.team = user.team
self.session.add(service)
self.session.commit()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 200
def test_unauthorized_service_id(self):
self.create_default_user()
resp = self.auth_and_get_path('/service/1')
assert resp.status_code == 302 |
ccf4ceab6fafb6f32668500f913eb256106bcc34 | test/integration/console_scripts_test.py | test/integration/console_scripts_test.py | """Test the PUDL console scripts from within PyTest."""
import pytest
@pytest.mark.parametrize(
"script_name", [
"pudl_setup",
"pudl_datastore",
"ferc1_to_sqlite",
"pudl_etl",
"datapkg_to_sqlite",
"epacems_to_parquet",
"pudl_territories",
])
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_setup(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
| """Test the PUDL console scripts from within PyTest."""
import pkg_resources
import pytest
# Obtain a list of all deployed entry point scripts to test:
PUDL_SCRIPTS = [
ep.name for ep in pkg_resources.iter_entry_points('console_scripts')
if ep.module_name.startswith("pudl")
]
@pytest.mark.parametrize("script_name", PUDL_SCRIPTS)
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_scripts(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
| Automate generation of list of console scripts to test. | Automate generation of list of console scripts to test.
| Python | mit | catalyst-cooperative/pudl,catalyst-cooperative/pudl | """Test the PUDL console scripts from within PyTest."""
import pytest
@pytest.mark.parametrize(
"script_name", [
"pudl_setup",
"pudl_datastore",
"ferc1_to_sqlite",
"pudl_etl",
"datapkg_to_sqlite",
"epacems_to_parquet",
"pudl_territories",
])
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_setup(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
Automate generation of list of console scripts to test. | """Test the PUDL console scripts from within PyTest."""
import pkg_resources
import pytest
# Obtain a list of all deployed entry point scripts to test:
PUDL_SCRIPTS = [
ep.name for ep in pkg_resources.iter_entry_points('console_scripts')
if ep.module_name.startswith("pudl")
]
@pytest.mark.parametrize("script_name", PUDL_SCRIPTS)
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_scripts(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
| <commit_before>"""Test the PUDL console scripts from within PyTest."""
import pytest
@pytest.mark.parametrize(
"script_name", [
"pudl_setup",
"pudl_datastore",
"ferc1_to_sqlite",
"pudl_etl",
"datapkg_to_sqlite",
"epacems_to_parquet",
"pudl_territories",
])
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_setup(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
<commit_msg>Automate generation of list of console scripts to test.<commit_after> | """Test the PUDL console scripts from within PyTest."""
import pkg_resources
import pytest
# Obtain a list of all deployed entry point scripts to test:
PUDL_SCRIPTS = [
ep.name for ep in pkg_resources.iter_entry_points('console_scripts')
if ep.module_name.startswith("pudl")
]
@pytest.mark.parametrize("script_name", PUDL_SCRIPTS)
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_scripts(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
| """Test the PUDL console scripts from within PyTest."""
import pytest
@pytest.mark.parametrize(
"script_name", [
"pudl_setup",
"pudl_datastore",
"ferc1_to_sqlite",
"pudl_etl",
"datapkg_to_sqlite",
"epacems_to_parquet",
"pudl_territories",
])
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_setup(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
Automate generation of list of console scripts to test."""Test the PUDL console scripts from within PyTest."""
import pkg_resources
import pytest
# Obtain a list of all deployed entry point scripts to test:
PUDL_SCRIPTS = [
ep.name for ep in pkg_resources.iter_entry_points('console_scripts')
if ep.module_name.startswith("pudl")
]
@pytest.mark.parametrize("script_name", PUDL_SCRIPTS)
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_scripts(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
| <commit_before>"""Test the PUDL console scripts from within PyTest."""
import pytest
@pytest.mark.parametrize(
"script_name", [
"pudl_setup",
"pudl_datastore",
"ferc1_to_sqlite",
"pudl_etl",
"datapkg_to_sqlite",
"epacems_to_parquet",
"pudl_territories",
])
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_setup(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
<commit_msg>Automate generation of list of console scripts to test.<commit_after>"""Test the PUDL console scripts from within PyTest."""
import pkg_resources
import pytest
# Obtain a list of all deployed entry point scripts to test:
PUDL_SCRIPTS = [
ep.name for ep in pkg_resources.iter_entry_points('console_scripts')
if ep.module_name.startswith("pudl")
]
@pytest.mark.parametrize("script_name", PUDL_SCRIPTS)
@pytest.mark.script_launch_mode('inprocess')
def test_pudl_scripts(script_runner, script_name):
"""Run each console script in --help mode for testing."""
ret = script_runner.run(script_name, '--help', print_result=False)
assert ret.success
|
ea6a84cee4f452f4503c6ce0fdd04b77d9017bdd | tinyblog/management/commands/import_tinyblog.py | tinyblog/management/commands/import_tinyblog.py | from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
r = requests.get(url)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
| from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from requests import exceptions
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
try:
r = requests.get(url)
except exceptions.MissingSchema as e:
raise CommandError(e.message)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
| Fix for bad URL schemes | Fix for bad URL schemes
| Python | bsd-3-clause | dominicrodger/tinyblog,dominicrodger/tinyblog | from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
r = requests.get(url)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
Fix for bad URL schemes | from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from requests import exceptions
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
try:
r = requests.get(url)
except exceptions.MissingSchema as e:
raise CommandError(e.message)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
r = requests.get(url)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
<commit_msg>Fix for bad URL schemes<commit_after> | from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from requests import exceptions
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
try:
r = requests.get(url)
except exceptions.MissingSchema as e:
raise CommandError(e.message)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
| from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
r = requests.get(url)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
Fix for bad URL schemesfrom django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from requests import exceptions
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
try:
r = requests.get(url)
except exceptions.MissingSchema as e:
raise CommandError(e.message)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
| <commit_before>from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
r = requests.get(url)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
<commit_msg>Fix for bad URL schemes<commit_after>from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
import requests
from requests import exceptions
from tinyblog.models import Post
class Command(BaseCommand):
args = 'url'
help = u'Fetches blog entries from <url>, and loads them into tinyblog.'
def handle(self, *args, **options):
if not args:
raise CommandError(u"You must provide a URL.")
url = args[0]
try:
r = requests.get(url)
except exceptions.MissingSchema as e:
raise CommandError(e.message)
if r.status_code != 200:
raise CommandError(u"Received status {0} from {1}, expected 200.".format(r.status_code, url))
for obj in serializers.deserialize("json", r.content):
self.stdout.write(u'Processing "{0}"...\n'.format(obj.object.title))
try:
Post.objects.get(slug=obj.object.slug)
self.stdout.write(u'Already had existing object with the slug "{0}".\n'.format(obj.object.slug))
except Post.DoesNotExist:
obj.save()
self.stdout.write(u'Saved new object.\n')
|
09d33da8657ec4c86855032f5ae16566c12fc2a5 | l10n_br_coa/models/l10n_br_account_tax_template.py | l10n_br_coa/models/l10n_br_account_tax_template.py | # Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
account_tax_template_data = {'chart_template_id': chart_template_id}
account_tax_template_data.update({
field: self[field]
for field in self._fields if self[field] is not False})
self.env['account.tax.template'].create(account_tax_template_data)
| # Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
chart = self.env['account.chart.template'].browse(chart_template_id)
module = chart.get_external_id()[chart_template_id].split('.')[0]
xmlid = '.'.join(
[module, self.get_external_id()[self.id].split('.')[1]])
tax_template_data = self.copy_data()[0]
tax_template_data.update({'chart_template_id': chart_template_id})
data = dict(xml_id=xmlid, values=tax_template_data, noupdate=True)
self.env['account.tax.template']._load_records([data])
| Create account.tax.template with external ids | [ADD] Create account.tax.template with external ids
| Python | agpl-3.0 | akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil | # Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
account_tax_template_data = {'chart_template_id': chart_template_id}
account_tax_template_data.update({
field: self[field]
for field in self._fields if self[field] is not False})
self.env['account.tax.template'].create(account_tax_template_data)
[ADD] Create account.tax.template with external ids | # Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
chart = self.env['account.chart.template'].browse(chart_template_id)
module = chart.get_external_id()[chart_template_id].split('.')[0]
xmlid = '.'.join(
[module, self.get_external_id()[self.id].split('.')[1]])
tax_template_data = self.copy_data()[0]
tax_template_data.update({'chart_template_id': chart_template_id})
data = dict(xml_id=xmlid, values=tax_template_data, noupdate=True)
self.env['account.tax.template']._load_records([data])
| <commit_before># Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
account_tax_template_data = {'chart_template_id': chart_template_id}
account_tax_template_data.update({
field: self[field]
for field in self._fields if self[field] is not False})
self.env['account.tax.template'].create(account_tax_template_data)
<commit_msg>[ADD] Create account.tax.template with external ids<commit_after> | # Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
chart = self.env['account.chart.template'].browse(chart_template_id)
module = chart.get_external_id()[chart_template_id].split('.')[0]
xmlid = '.'.join(
[module, self.get_external_id()[self.id].split('.')[1]])
tax_template_data = self.copy_data()[0]
tax_template_data.update({'chart_template_id': chart_template_id})
data = dict(xml_id=xmlid, values=tax_template_data, noupdate=True)
self.env['account.tax.template']._load_records([data])
| # Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
account_tax_template_data = {'chart_template_id': chart_template_id}
account_tax_template_data.update({
field: self[field]
for field in self._fields if self[field] is not False})
self.env['account.tax.template'].create(account_tax_template_data)
[ADD] Create account.tax.template with external ids# Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
chart = self.env['account.chart.template'].browse(chart_template_id)
module = chart.get_external_id()[chart_template_id].split('.')[0]
xmlid = '.'.join(
[module, self.get_external_id()[self.id].split('.')[1]])
tax_template_data = self.copy_data()[0]
tax_template_data.update({'chart_template_id': chart_template_id})
data = dict(xml_id=xmlid, values=tax_template_data, noupdate=True)
self.env['account.tax.template']._load_records([data])
| <commit_before># Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
account_tax_template_data = {'chart_template_id': chart_template_id}
account_tax_template_data.update({
field: self[field]
for field in self._fields if self[field] is not False})
self.env['account.tax.template'].create(account_tax_template_data)
<commit_msg>[ADD] Create account.tax.template with external ids<commit_after># Copyright 2020 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class L10nBrAccountTaxTemplate(models.Model):
_name = 'l10n_br_account.tax.template'
_inherit = 'account.tax.template'
chart_template_id = fields.Many2one(required=False)
def create_account_tax_templates(self, chart_template_id):
self.ensure_one()
chart = self.env['account.chart.template'].browse(chart_template_id)
module = chart.get_external_id()[chart_template_id].split('.')[0]
xmlid = '.'.join(
[module, self.get_external_id()[self.id].split('.')[1]])
tax_template_data = self.copy_data()[0]
tax_template_data.update({'chart_template_id': chart_template_id})
data = dict(xml_id=xmlid, values=tax_template_data, noupdate=True)
self.env['account.tax.template']._load_records([data])
|
e745cbd16cd2eef2a5805aa7bd113bcaf147af4e | waterfall_wall/serializers.py | waterfall_wall/serializers.py | from django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Image
fields = ('path', 'url', 'nude_percent')
| from django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.SerializerMethodField()
def get_url(self, obj):
return obj.path.url
class Meta:
model = Image
fields = ('url', 'nude_percent')
| Correct the url format of image API | Correct the url format of image API
| Python | mit | carlcarl/rcard,carlcarl/rcard | from django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Image
fields = ('path', 'url', 'nude_percent')
Correct the url format of image API | from django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.SerializerMethodField()
def get_url(self, obj):
return obj.path.url
class Meta:
model = Image
fields = ('url', 'nude_percent')
| <commit_before>from django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Image
fields = ('path', 'url', 'nude_percent')
<commit_msg>Correct the url format of image API<commit_after> | from django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.SerializerMethodField()
def get_url(self, obj):
return obj.path.url
class Meta:
model = Image
fields = ('url', 'nude_percent')
| from django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Image
fields = ('path', 'url', 'nude_percent')
Correct the url format of image APIfrom django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.SerializerMethodField()
def get_url(self, obj):
return obj.path.url
class Meta:
model = Image
fields = ('url', 'nude_percent')
| <commit_before>from django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Image
fields = ('path', 'url', 'nude_percent')
<commit_msg>Correct the url format of image API<commit_after>from django.contrib.auth.models import User, Group
from waterfall_wall.models import Image
from rest_framework import serializers
class ImageSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.SerializerMethodField()
def get_url(self, obj):
return obj.path.url
class Meta:
model = Image
fields = ('url', 'nude_percent')
|
8c52f7d65d460dbe659256ed54eb4b60f702bab3 | healthcheck/contrib/django/status_endpoint/views.py | healthcheck/contrib/django/status_endpoint/views.py | import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES')
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
| import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES', None)
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
| Add empty default for STATUS_CHECK_FILES setting | Add empty default for STATUS_CHECK_FILES setting
Given that:
* The line caused an attribute error if the setting wasn't defined,
* The use of getattr was pointless as written since normal attribute access would have worked fine,
* `dict.get` has a default of `None` without having to specify it, and
* The line was followed by a boolean test for the fetched attribute,
I infer that this was the intended meaning.
It should probably be documented somewhere that these settings are required and what they do.
| Python | mit | yola/healthcheck | import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES')
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
Add empty default for STATUS_CHECK_FILES setting
Given that:
* The line caused an attribute error if the setting wasn't defined,
* The use of getattr was pointless as written since normal attribute access would have worked fine,
* `dict.get` has a default of `None` without having to specify it, and
* The line was followed by a boolean test for the fetched attribute,
I infer that this was the intended meaning.
It should probably be documented somewhere that these settings are required and what they do. | import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES', None)
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
| <commit_before>import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES')
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
<commit_msg>Add empty default for STATUS_CHECK_FILES setting
Given that:
* The line caused an attribute error if the setting wasn't defined,
* The use of getattr was pointless as written since normal attribute access would have worked fine,
* `dict.get` has a default of `None` without having to specify it, and
* The line was followed by a boolean test for the fetched attribute,
I infer that this was the intended meaning.
It should probably be documented somewhere that these settings are required and what they do.<commit_after> | import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES', None)
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
| import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES')
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
Add empty default for STATUS_CHECK_FILES setting
Given that:
* The line caused an attribute error if the setting wasn't defined,
* The use of getattr was pointless as written since normal attribute access would have worked fine,
* `dict.get` has a default of `None` without having to specify it, and
* The line was followed by a boolean test for the fetched attribute,
I infer that this was the intended meaning.
It should probably be documented somewhere that these settings are required and what they do.import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES', None)
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
| <commit_before>import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES')
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
<commit_msg>Add empty default for STATUS_CHECK_FILES setting
Given that:
* The line caused an attribute error if the setting wasn't defined,
* The use of getattr was pointless as written since normal attribute access would have worked fine,
* `dict.get` has a default of `None` without having to specify it, and
* The line was followed by a boolean test for the fetched attribute,
I infer that this was the intended meaning.
It should probably be documented somewhere that these settings are required and what they do.<commit_after>import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES', None)
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
|
35c97d14eede0e48a0daff8e7f04aeba09f02268 | get.py | get.py |
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
map(save, range(i, j + 1))
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
|
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
from multiprocessing import Pool
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
p = Pool()
p.map(save, range(i, j + 1))
if __name__ == '__main__':
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
| Use multiprocessing.Pool to speed up | Use multiprocessing.Pool to speed up
| Python | mit | zlsun/ProjectEuler |
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
map(save, range(i, j + 1))
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
Use multiprocessing.Pool to speed up |
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
from multiprocessing import Pool
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
p = Pool()
p.map(save, range(i, j + 1))
if __name__ == '__main__':
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
| <commit_before>
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
map(save, range(i, j + 1))
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
<commit_msg>Use multiprocessing.Pool to speed up<commit_after> |
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
from multiprocessing import Pool
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
p = Pool()
p.map(save, range(i, j + 1))
if __name__ == '__main__':
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
|
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
map(save, range(i, j + 1))
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
Use multiprocessing.Pool to speed up
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
from multiprocessing import Pool
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
p = Pool()
p.map(save, range(i, j + 1))
if __name__ == '__main__':
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
| <commit_before>
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
map(save, range(i, j + 1))
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
<commit_msg>Use multiprocessing.Pool to speed up<commit_after>
from requests import get
from bs4 import BeautifulSoup as BS
from os.path import exists
from multiprocessing import Pool
url = "https://www.projecteuler.net/problem=%d"
def get_info(i):
soup = BS(get(url % i, verify=False).content)
problem = soup.find(id="content")
title = problem.h2.string
content = problem.find(class_="problem_content").get_text()
info = title + '\n' + content
info = info.encode('u8')
return info
def save(i):
name = "%03d.py" % i
# if exists(name):
# print name, "exist"
# return
f = file(name, "w")
info = get_info(i)
f.write(
'''\
#-*- encoding: utf-8 -*-
"""
%s"""
from utils import *
#
''' % info)
f.close()
print name, "saved"
def save_all(i, j):
p = Pool()
p.map(save, range(i, j + 1))
if __name__ == '__main__':
N = 10
last = int(file('last.txt').read())
save_all(last + 1, last + N)
file('last.txt', 'w').write(str(last + N))
|
eddd7f856c7dc423c387d496a87cf5fdf941215b | helpers/visited_thread_set.py | helpers/visited_thread_set.py |
class VisitedThreadSet():
set = None
def __init__(self):
pass
def load(self):
pass
def save(self):
pass
def add_thread(self):
pass
def check_thread_exists(self):
pass
|
class VisitedThreadSet():
set = None
def __init__(self):
self.set = set()
def load_set(self):
pass
def save_set(self):
pass
def add(self, value):
self.set.add(str(value))
def contains(self, value):
if str(value) in self.set:
return True
else:
return False
| Add value to VisitedThreadSet or check if it exists | New: Add value to VisitedThreadSet or check if it exists
| Python | mit | AFFogarty/SEP-Bot,AFFogarty/SEP-Bot |
class VisitedThreadSet():
set = None
def __init__(self):
pass
def load(self):
pass
def save(self):
pass
def add_thread(self):
pass
def check_thread_exists(self):
pass
New: Add value to VisitedThreadSet or check if it exists |
class VisitedThreadSet():
set = None
def __init__(self):
self.set = set()
def load_set(self):
pass
def save_set(self):
pass
def add(self, value):
self.set.add(str(value))
def contains(self, value):
if str(value) in self.set:
return True
else:
return False
| <commit_before>
class VisitedThreadSet():
set = None
def __init__(self):
pass
def load(self):
pass
def save(self):
pass
def add_thread(self):
pass
def check_thread_exists(self):
pass
<commit_msg>New: Add value to VisitedThreadSet or check if it exists<commit_after> |
class VisitedThreadSet():
set = None
def __init__(self):
self.set = set()
def load_set(self):
pass
def save_set(self):
pass
def add(self, value):
self.set.add(str(value))
def contains(self, value):
if str(value) in self.set:
return True
else:
return False
|
class VisitedThreadSet():
set = None
def __init__(self):
pass
def load(self):
pass
def save(self):
pass
def add_thread(self):
pass
def check_thread_exists(self):
pass
New: Add value to VisitedThreadSet or check if it exists
class VisitedThreadSet():
set = None
def __init__(self):
self.set = set()
def load_set(self):
pass
def save_set(self):
pass
def add(self, value):
self.set.add(str(value))
def contains(self, value):
if str(value) in self.set:
return True
else:
return False
| <commit_before>
class VisitedThreadSet():
set = None
def __init__(self):
pass
def load(self):
pass
def save(self):
pass
def add_thread(self):
pass
def check_thread_exists(self):
pass
<commit_msg>New: Add value to VisitedThreadSet or check if it exists<commit_after>
class VisitedThreadSet():
set = None
def __init__(self):
self.set = set()
def load_set(self):
pass
def save_set(self):
pass
def add(self, value):
self.set.add(str(value))
def contains(self, value):
if str(value) in self.set:
return True
else:
return False
|
25bfdfccc89c8150ea8bc4a024415861808d4a6e | fabfile/__init__.py | fabfile/__init__.py | """
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
try:
nose.core.run(argv=[''] + default_args.split())
except SystemExit:
abort("Nose encountered an error; you may be missing newly added test dependencies. Try running 'pip install -r requirements.txt'.")
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
| """
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
| Fix test runner to exit correctly. | Fix test runner to exit correctly.
The attempt at catching ImportError-driven exits seemed broken, so nuked it. Bah.
| Python | bsd-2-clause | fernandezcuesta/fabric,xLegoz/fabric,tolbkni/fabric,askulkarni2/fabric,hrubi/fabric,StackStorm/fabric,cgvarela/fabric,bitprophet/fabric,mathiasertl/fabric,jaraco/fabric,cmattoon/fabric,rbramwell/fabric,rodrigc/fabric,rane-hs/fabric-py3,MjAbuz/fabric,bitmonk/fabric,kmonsoor/fabric,tekapo/fabric,sdelements/fabric,qinrong/fabric,itoed/fabric,getsentry/fabric,ploxiln/fabric,raimon49/fabric,elijah513/fabric,pgroudas/fabric,kxxoling/fabric,amaniak/fabric,haridsv/fabric,likesxuqiang/fabric,TarasRudnyk/fabric,opavader/fabric,pashinin/fabric,SamuelMarks/fabric,bspink/fabric | """
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
try:
nose.core.run(argv=[''] + default_args.split())
except SystemExit:
abort("Nose encountered an error; you may be missing newly added test dependencies. Try running 'pip install -r requirements.txt'.")
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
Fix test runner to exit correctly.
The attempt at catching ImportError-driven exits seemed broken, so nuked it. Bah. | """
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
| <commit_before>"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
try:
nose.core.run(argv=[''] + default_args.split())
except SystemExit:
abort("Nose encountered an error; you may be missing newly added test dependencies. Try running 'pip install -r requirements.txt'.")
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
<commit_msg>Fix test runner to exit correctly.
The attempt at catching ImportError-driven exits seemed broken, so nuked it. Bah.<commit_after> | """
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
| """
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
try:
nose.core.run(argv=[''] + default_args.split())
except SystemExit:
abort("Nose encountered an error; you may be missing newly added test dependencies. Try running 'pip install -r requirements.txt'.")
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
Fix test runner to exit correctly.
The attempt at catching ImportError-driven exits seemed broken, so nuked it. Bah."""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
| <commit_before>"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
try:
nose.core.run(argv=[''] + default_args.split())
except SystemExit:
abort("Nose encountered an error; you may be missing newly added test dependencies. Try running 'pip install -r requirements.txt'.")
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
<commit_msg>Fix test runner to exit correctly.
The attempt at catching ImportError-driven exits seemed broken, so nuked it. Bah.<commit_after>"""
Fabric's own fabfile.
"""
from __future__ import with_statement
import nose
from fabric.api import *
import docs, tag
from utils import msg
@task(default=True)
def test(args=None):
"""
Run all unit tests and doctests.
Specify string argument ``args`` for additional args to ``nosetests``.
"""
default_args = "-sv --with-doctest --nologcapture --with-color"
default_args += (" " + args) if args else ""
nose.core.run_exit(argv=[''] + default_args.split())
@task
def upload():
"""
Build, register and upload to PyPI
"""
with msg("Uploading to PyPI"):
local('python setup.py sdist register upload')
@task
def release(force='no'):
"""
Tag/push, build, upload new version and build/upload documentation.
"""
tag.tag(force=force, push='yes')
upload()
|
72870bfe77e7e8669cc4ed46e112e0710dabc609 | Koko/views.py | Koko/views.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from flask import render_template
from base import app
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
| #!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
from hashlib import md5
from flask import request, render_template, Response
from base import app, cache
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
@app.route('/koko/lltk/<path:query>')
@cache.cached(timeout = 3600, key_prefix = lambda: md5(repr(request)).hexdigest(), unless = lambda: bool(request.args.has_key('caching') and request.args['caching'].lower() == 'false'))
def lltk(query):
''' Wrappers the LLTK-RESTful interface. '''
parameters = '?' + '&'.join([element[0] + '=' + element[1] for element in request.args.to_dict().items()])
uri = 'http://%s:%d%s/%s%s' % (config['lltk-host'], config['lltk-port'], config['lltk-prefix'], query, parameters)
response = requests.get(uri)
return Response(response.text, status = response.status_code, content_type = response.headers['content-type'],)
| Add a wrapper route to LLTK-RESTful | Add a wrapper route to LLTK-RESTful
| Python | agpl-3.0 | lltk/Koko,lltk/Koko | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from flask import render_template
from base import app
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
Add a wrapper route to LLTK-RESTful | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
from hashlib import md5
from flask import request, render_template, Response
from base import app, cache
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
@app.route('/koko/lltk/<path:query>')
@cache.cached(timeout = 3600, key_prefix = lambda: md5(repr(request)).hexdigest(), unless = lambda: bool(request.args.has_key('caching') and request.args['caching'].lower() == 'false'))
def lltk(query):
''' Wrappers the LLTK-RESTful interface. '''
parameters = '?' + '&'.join([element[0] + '=' + element[1] for element in request.args.to_dict().items()])
uri = 'http://%s:%d%s/%s%s' % (config['lltk-host'], config['lltk-port'], config['lltk-prefix'], query, parameters)
response = requests.get(uri)
return Response(response.text, status = response.status_code, content_type = response.headers['content-type'],)
| <commit_before>#!/usr/bin/python
# -*- coding: UTF-8 -*-
from flask import render_template
from base import app
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
<commit_msg>Add a wrapper route to LLTK-RESTful<commit_after> | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
from hashlib import md5
from flask import request, render_template, Response
from base import app, cache
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
@app.route('/koko/lltk/<path:query>')
@cache.cached(timeout = 3600, key_prefix = lambda: md5(repr(request)).hexdigest(), unless = lambda: bool(request.args.has_key('caching') and request.args['caching'].lower() == 'false'))
def lltk(query):
''' Wrappers the LLTK-RESTful interface. '''
parameters = '?' + '&'.join([element[0] + '=' + element[1] for element in request.args.to_dict().items()])
uri = 'http://%s:%d%s/%s%s' % (config['lltk-host'], config['lltk-port'], config['lltk-prefix'], query, parameters)
response = requests.get(uri)
return Response(response.text, status = response.status_code, content_type = response.headers['content-type'],)
| #!/usr/bin/python
# -*- coding: UTF-8 -*-
from flask import render_template
from base import app
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
Add a wrapper route to LLTK-RESTful#!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
from hashlib import md5
from flask import request, render_template, Response
from base import app, cache
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
@app.route('/koko/lltk/<path:query>')
@cache.cached(timeout = 3600, key_prefix = lambda: md5(repr(request)).hexdigest(), unless = lambda: bool(request.args.has_key('caching') and request.args['caching'].lower() == 'false'))
def lltk(query):
''' Wrappers the LLTK-RESTful interface. '''
parameters = '?' + '&'.join([element[0] + '=' + element[1] for element in request.args.to_dict().items()])
uri = 'http://%s:%d%s/%s%s' % (config['lltk-host'], config['lltk-port'], config['lltk-prefix'], query, parameters)
response = requests.get(uri)
return Response(response.text, status = response.status_code, content_type = response.headers['content-type'],)
| <commit_before>#!/usr/bin/python
# -*- coding: UTF-8 -*-
from flask import render_template
from base import app
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
<commit_msg>Add a wrapper route to LLTK-RESTful<commit_after>#!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
from hashlib import md5
from flask import request, render_template, Response
from base import app, cache
@app.route('/', methods = ['GET'])
@app.route('/koko', methods = ['GET'])
def start():
''' Returns a landing page. '''
return render_template('start.html')
@app.route('/koko/lltk/<path:query>')
@cache.cached(timeout = 3600, key_prefix = lambda: md5(repr(request)).hexdigest(), unless = lambda: bool(request.args.has_key('caching') and request.args['caching'].lower() == 'false'))
def lltk(query):
''' Wrappers the LLTK-RESTful interface. '''
parameters = '?' + '&'.join([element[0] + '=' + element[1] for element in request.args.to_dict().items()])
uri = 'http://%s:%d%s/%s%s' % (config['lltk-host'], config['lltk-port'], config['lltk-prefix'], query, parameters)
response = requests.get(uri)
return Response(response.text, status = response.status_code, content_type = response.headers['content-type'],)
|
a20fc95d3a4dc194ef4f6d227976ff5bba229aaa | feincms/__init__.py | feincms/__init__.py | VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not key.startswith(('FEINCMS_', '_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
| VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not (key.startswith('FEINCMS_') or key.startswith('_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
| Fix a Python 2.4 incompatibility that snuck in | Fix a Python 2.4 incompatibility that snuck in
Fixes github issue #214
| Python | bsd-3-clause | matthiask/django-content-editor,nickburlett/feincms,matthiask/django-content-editor,matthiask/feincms2-content,michaelkuty/feincms,matthiask/feincms2-content,pjdelport/feincms,mjl/feincms,joshuajonah/feincms,matthiask/django-content-editor,feincms/feincms,nickburlett/feincms,nickburlett/feincms,mjl/feincms,joshuajonah/feincms,joshuajonah/feincms,pjdelport/feincms,joshuajonah/feincms,michaelkuty/feincms,pjdelport/feincms,feincms/feincms,michaelkuty/feincms,michaelkuty/feincms,matthiask/feincms2-content,mjl/feincms,nickburlett/feincms,feincms/feincms,matthiask/django-content-editor | VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not key.startswith(('FEINCMS_', '_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
Fix a Python 2.4 incompatibility that snuck in
Fixes github issue #214 | VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not (key.startswith('FEINCMS_') or key.startswith('_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
| <commit_before>VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not key.startswith(('FEINCMS_', '_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
<commit_msg>Fix a Python 2.4 incompatibility that snuck in
Fixes github issue #214<commit_after> | VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not (key.startswith('FEINCMS_') or key.startswith('_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
| VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not key.startswith(('FEINCMS_', '_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
Fix a Python 2.4 incompatibility that snuck in
Fixes github issue #214VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not (key.startswith('FEINCMS_') or key.startswith('_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
| <commit_before>VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not key.startswith(('FEINCMS_', '_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
<commit_msg>Fix a Python 2.4 incompatibility that snuck in
Fixes github issue #214<commit_after>VERSION = (1, 4, 2)
__version__ = '.'.join(map(str, VERSION))
class LazySettings(object):
def _load_settings(self):
from feincms import default_settings
from django.conf import settings as django_settings
for key in dir(default_settings):
if not (key.startswith('FEINCMS_') or key.startswith('_HACK_')):
continue
setattr(self, key, getattr(django_settings, key,
getattr(default_settings, key)))
def __getattr__(self, attr):
self._load_settings()
del self.__class__.__getattr__
return self.__dict__[attr]
settings = LazySettings()
COMPLETELY_LOADED = False
def ensure_completely_loaded():
"""
This method ensures all models are completely loaded
FeinCMS requires Django to be completely initialized before proceeding,
because of the extension mechanism and the dynamically created content
types.
For more informations, have a look at issue #23 on github:
http://github.com/feincms/feincms/issues#issue/23
"""
global COMPLETELY_LOADED
if COMPLETELY_LOADED:
return True
from django.core.management.validation import get_validation_errors
from StringIO import StringIO
get_validation_errors(StringIO(), None)
COMPLETELY_LOADED = True
return True
|
169d5b56ab5936a785ae501a91005fcfe3af6674 | ibmcnx/test/test.py | ibmcnx/test/test.py | #import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
ibmcnx.test.loadFunction.loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
| #import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
| Customize scripts to work with menu | Customize scripts to work with menu
| Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 | #import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
ibmcnx.test.loadFunction.loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
Customize scripts to work with menu | #import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
| <commit_before>#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
ibmcnx.test.loadFunction.loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
<commit_msg>Customize scripts to work with menu<commit_after> | #import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
| #import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
ibmcnx.test.loadFunction.loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
Customize scripts to work with menu#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
| <commit_before>#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
ibmcnx.test.loadFunction.loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
<commit_msg>Customize scripts to work with menu<commit_after>#import ibmcnx.test.loadFunction
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
def loadFilesService():
global globdict
execfile( "filesAdmin.py", globdict )
loadFilesService()
FilesPolicyService.browse( "title", "true", 1, 25 )
|
d7ca978c696deb13c53fc3fdc9d227d0836b97f8 | test/data/testCC.py | test/data/testCC.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsizeandshape(self):
"""Are the dimensions of ccenergies correct?"""
self.assertEqual(self.data.ccenergies.shape,
(len(self.data.scfenergies),))
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
| Test size and shape of ccenergies | Test size and shape of ccenergies
| Python | bsd-3-clause | berquist/cclib,berquist/cclib,cclib/cclib,ATenderholt/cclib,ATenderholt/cclib,langner/cclib,cclib/cclib,berquist/cclib,cclib/cclib,langner/cclib,langner/cclib | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
Test size and shape of ccenergies | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsizeandshape(self):
"""Are the dimensions of ccenergies correct?"""
self.assertEqual(self.data.ccenergies.shape,
(len(self.data.scfenergies),))
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
<commit_msg>Test size and shape of ccenergies<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsizeandshape(self):
"""Are the dimensions of ccenergies correct?"""
self.assertEqual(self.data.ccenergies.shape,
(len(self.data.scfenergies),))
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
Test size and shape of ccenergies# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsizeandshape(self):
"""Are the dimensions of ccenergies correct?"""
self.assertEqual(self.data.ccenergies.shape,
(len(self.data.scfenergies),))
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
<commit_msg>Test size and shape of ccenergies<commit_after># -*- coding: utf-8 -*-
#
# Copyright (c) 2017, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Test coupled cluster logfiles"""
import os
import unittest
import numpy
__filedir__ = os.path.realpath(os.path.dirname(__file__))
class GenericCCTest(unittest.TestCase):
"""Generic coupled cluster unittest"""
def testsizeandshape(self):
"""Are the dimensions of ccenergies correct?"""
self.assertEqual(self.data.ccenergies.shape,
(len(self.data.scfenergies),))
def testsign(self):
"""Are the coupled cluster corrections negative?"""
corrections = self.data.ccenergies - self.data.scfenergies
self.assertTrue(numpy.alltrue(corrections < 0.0))
if __name__ == "__main__":
import sys
sys.path.insert(1, os.path.join(__filedir__, ".."))
from test_data import DataSuite
suite = DataSuite(['CC'])
suite.testall()
|
16acf6dba180d17b142e5799f62e59a2771099fa | numscons/checkers/__init__.py | numscons/checkers/__init__.py | from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
| from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
| Add fortran module import to numscons top level namespace. | Add fortran module import to numscons top level namespace. | Python | bsd-3-clause | cournape/numscons,cournape/numscons,cournape/numscons | from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
Add fortran module import to numscons top level namespace. | from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
| <commit_before>from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
<commit_msg>Add fortran module import to numscons top level namespace.<commit_after> | from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
| from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
Add fortran module import to numscons top level namespace.from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
| <commit_before>from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
<commit_msg>Add fortran module import to numscons top level namespace.<commit_after>from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
|
80cecb69170adf7235ecbff3eec4e737cf5d9292 | impersonate/urls.py | impersonate/urls.py | # -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
urlpatterns = patterns('impersonate.views',
url(r'^stop/$',
'stop_impersonate',
name='impersonate-stop'),
url(r'^list/$',
'list_users',
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
'search_users',
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
'impersonate',
name='impersonate-start'),
)
| # -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import stop_impersonate, list_users, search_users, impersonate
urlpatterns = [
url(r'^stop/$',
stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
impersonate,
name='impersonate-start'),
]
| Replace deprecated string view arguments to url | Replace deprecated string view arguments to url
| Python | bsd-3-clause | Top20Talent/django-impersonate,Top20Talent/django-impersonate | # -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
urlpatterns = patterns('impersonate.views',
url(r'^stop/$',
'stop_impersonate',
name='impersonate-stop'),
url(r'^list/$',
'list_users',
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
'search_users',
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
'impersonate',
name='impersonate-start'),
)
Replace deprecated string view arguments to url | # -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import stop_impersonate, list_users, search_users, impersonate
urlpatterns = [
url(r'^stop/$',
stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
impersonate,
name='impersonate-start'),
]
| <commit_before># -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
urlpatterns = patterns('impersonate.views',
url(r'^stop/$',
'stop_impersonate',
name='impersonate-stop'),
url(r'^list/$',
'list_users',
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
'search_users',
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
'impersonate',
name='impersonate-start'),
)
<commit_msg>Replace deprecated string view arguments to url<commit_after> | # -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import stop_impersonate, list_users, search_users, impersonate
urlpatterns = [
url(r'^stop/$',
stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
impersonate,
name='impersonate-start'),
]
| # -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
urlpatterns = patterns('impersonate.views',
url(r'^stop/$',
'stop_impersonate',
name='impersonate-stop'),
url(r'^list/$',
'list_users',
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
'search_users',
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
'impersonate',
name='impersonate-start'),
)
Replace deprecated string view arguments to url# -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import stop_impersonate, list_users, search_users, impersonate
urlpatterns = [
url(r'^stop/$',
stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
impersonate,
name='impersonate-start'),
]
| <commit_before># -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
urlpatterns = patterns('impersonate.views',
url(r'^stop/$',
'stop_impersonate',
name='impersonate-stop'),
url(r'^list/$',
'list_users',
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
'search_users',
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
'impersonate',
name='impersonate-start'),
)
<commit_msg>Replace deprecated string view arguments to url<commit_after># -*- coding: utf-8 -*-
from django.conf.urls import url
from .views import stop_impersonate, list_users, search_users, impersonate
urlpatterns = [
url(r'^stop/$',
stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
impersonate,
name='impersonate-start'),
]
|
c6eaf3b4eaf851612baac9b0a10925654aaecf52 | partner_compassion/model/mail_followers.py | partner_compassion/model/mail_followers.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| Fix typo in follower restrictions | Fix typo in follower restrictions
| Python | agpl-3.0 | CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
Fix typo in follower restrictions | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| <commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
<commit_msg>Fix typo in follower restrictions<commit_after> | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
Fix typo in follower restrictions# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
| <commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['res_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
<commit_msg>Fix typo in follower restrictions<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, api
class MailFollowers(models.Model):
""" Prevent having too much followers in mail threads.
"""
_inherit = 'mail.followers'
@api.model
def _mail_restrict_follower_selection_get_domain(self, model):
parameter_name = 'mail_restrict_follower_selection.domain'
return self.env['ir.config_parameter'].get_param(
'%s.%s' % (parameter_name, model),
self.env['ir.config_parameter'].get_param(
parameter_name, default='[]')
)
@api.model
def create(self, vals):
"""
Remove partners not in domain selection of module
mail_restrict_follower_selection
"""
model = vals['res_model']
res_id = vals['partner_id']
domain = self._mail_restrict_follower_selection_get_domain(model)
allowed = self.env['res.partner'].search(eval(domain))
if allowed and res_id in allowed.ids:
return super(MailFollowers, self).create(vals)
return self
|
1e04152b69f88f6512920db8ccdd9ba2f0201517 | geotrek/api/mobile/urls.py | geotrek/api/mobile/urls.py | from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
| from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism' in settings.INSTALLED_APPS:
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings')
| Fix api mobile only with geotrek flatpages trekking tourism | Fix api mobile only with geotrek flatpages trekking tourism
| Python | bsd-2-clause | GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin | from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
Fix api mobile only with geotrek flatpages trekking tourism | from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism' in settings.INSTALLED_APPS:
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings')
| <commit_before>from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
<commit_msg>Fix api mobile only with geotrek flatpages trekking tourism<commit_after> | from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism' in settings.INSTALLED_APPS:
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings')
| from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
Fix api mobile only with geotrek flatpages trekking tourismfrom __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism' in settings.INSTALLED_APPS:
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings')
| <commit_before>from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism':
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings'),
if 'geotrek.flatpages' in settings.INSTALLED_APPS:
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
if 'geotrek.trekking' in settings.INSTALLED_APPS:
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
<commit_msg>Fix api mobile only with geotrek flatpages trekking tourism<commit_after>from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import url, include
from rest_framework import routers
if 'geotrek.flatpages' and 'geotrek.trekking' and 'geotrek.tourism' in settings.INSTALLED_APPS:
from geotrek.api.mobile import views as api_mobile
router = routers.DefaultRouter()
router.register(r'flatpages', api_mobile.FlatPageViewSet, base_name='flatpage')
router.register(r'treks', api_mobile.TrekViewSet, base_name='treks')
urlpatterns = [
url(r'^$', api_mobile.SwaggerSchemaView.as_view(), name="schema"),
url(r'^', include(router.urls)),
]
urlpatterns += url(r'^settings/$', api_mobile.SettingsView.as_view(), name='settings')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.