commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f557c20678de706d9e714e1d903b482b7e886e3b | keras_contrib/backend/cntk_backend.py | keras_contrib/backend/cntk_backend.py | from keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
| from keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
def moments(x, axes, shift=None, keep_dims=False):
''' Calculates and returns the mean and variance of the input '''
mean_batch = KCN.mean(x, axis=axes, keepdims=keep_dims)
var_batch = KCN.var(x, axis=axes, keepdims=keep_dims)
return mean_batch, var_batch
| Add moments op to CNTK backend, and associated tests | Add moments op to CNTK backend, and associated tests
| Python | mit | keras-team/keras-contrib,keras-team/keras-contrib,farizrahman4u/keras-contrib,keras-team/keras-contrib | from keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
Add moments op to CNTK backend, and associated tests | from keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
def moments(x, axes, shift=None, keep_dims=False):
''' Calculates and returns the mean and variance of the input '''
mean_batch = KCN.mean(x, axis=axes, keepdims=keep_dims)
var_batch = KCN.var(x, axis=axes, keepdims=keep_dims)
return mean_batch, var_batch
| <commit_before>from keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
<commit_msg>Add moments op to CNTK backend, and associated tests<commit_after> | from keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
def moments(x, axes, shift=None, keep_dims=False):
''' Calculates and returns the mean and variance of the input '''
mean_batch = KCN.mean(x, axis=axes, keepdims=keep_dims)
var_batch = KCN.var(x, axis=axes, keepdims=keep_dims)
return mean_batch, var_batch
| from keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
Add moments op to CNTK backend, and associated testsfrom keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
def moments(x, axes, shift=None, keep_dims=False):
''' Calculates and returns the mean and variance of the input '''
mean_batch = KCN.mean(x, axis=axes, keepdims=keep_dims)
var_batch = KCN.var(x, axis=axes, keepdims=keep_dims)
return mean_batch, var_batch
| <commit_before>from keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
<commit_msg>Add moments op to CNTK backend, and associated tests<commit_after>from keras.backend import cntk_backend as KCN
from keras.backend.cntk_backend import logsumexp
import cntk as C
import numpy as np
def clip(x, min_value, max_value):
"""Element-wise value clipping.
If min_value > max_value, clipping range is [min_value,min_value].
# Arguments
x: Tensor or variable.
min_value: Tensor, float, int, or None.
If min_value is None, defaults to -infinity.
max_value: Tensor, float, int, or None.
If max_value is None, defaults to infinity.
# Returns
A tensor.
"""
if max_value is None:
max_value = np.inf
if min_value is None:
min_value = -np.inf
max_value = C.maximum(min_value, max_value)
return C.clip(x, min_value, max_value)
def moments(x, axes, shift=None, keep_dims=False):
''' Calculates and returns the mean and variance of the input '''
mean_batch = KCN.mean(x, axis=axes, keepdims=keep_dims)
var_batch = KCN.var(x, axis=axes, keepdims=keep_dims)
return mean_batch, var_batch
|
52da8be7ffe6ea2ba09acf3ce44b9a79758b115b | glance/version.py | glance/version.py | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('glance')
| # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
GLANCE_VENDOR = "OpenStack Foundation"
GLANCE_PRODUCT = "OpenStack Glance"
GLANCE_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo(object):
release = "REDHATGLANCERELEASE"
version = "REDHATGLANCEVERSION"
def version_string(self):
return self.version
def cached_version_string(self):
return self.version
def release_string(self):
return self.release
def canonical_version_string(self):
return self.version
def version_string_with_vcs(self):
return self.release
version_info = VersionInfo()
| Remove runtime dep on python pbr | Remove runtime dep on python pbr
| Python | apache-2.0 | redhat-openstack/glance,redhat-openstack/glance | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('glance')
Remove runtime dep on python pbr | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
GLANCE_VENDOR = "OpenStack Foundation"
GLANCE_PRODUCT = "OpenStack Glance"
GLANCE_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo(object):
release = "REDHATGLANCERELEASE"
version = "REDHATGLANCEVERSION"
def version_string(self):
return self.version
def cached_version_string(self):
return self.version
def release_string(self):
return self.release
def canonical_version_string(self):
return self.version
def version_string_with_vcs(self):
return self.release
version_info = VersionInfo()
| <commit_before># Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('glance')
<commit_msg>Remove runtime dep on python pbr<commit_after> | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
GLANCE_VENDOR = "OpenStack Foundation"
GLANCE_PRODUCT = "OpenStack Glance"
GLANCE_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo(object):
release = "REDHATGLANCERELEASE"
version = "REDHATGLANCEVERSION"
def version_string(self):
return self.version
def cached_version_string(self):
return self.version
def release_string(self):
return self.release
def canonical_version_string(self):
return self.version
def version_string_with_vcs(self):
return self.release
version_info = VersionInfo()
| # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('glance')
Remove runtime dep on python pbr# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
GLANCE_VENDOR = "OpenStack Foundation"
GLANCE_PRODUCT = "OpenStack Glance"
GLANCE_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo(object):
release = "REDHATGLANCERELEASE"
version = "REDHATGLANCEVERSION"
def version_string(self):
return self.version
def cached_version_string(self):
return self.version
def release_string(self):
return self.release
def canonical_version_string(self):
return self.version
def version_string_with_vcs(self):
return self.release
version_info = VersionInfo()
| <commit_before># Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('glance')
<commit_msg>Remove runtime dep on python pbr<commit_after># Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
GLANCE_VENDOR = "OpenStack Foundation"
GLANCE_PRODUCT = "OpenStack Glance"
GLANCE_PACKAGE = None # OS distro package version suffix
loaded = False
class VersionInfo(object):
release = "REDHATGLANCERELEASE"
version = "REDHATGLANCEVERSION"
def version_string(self):
return self.version
def cached_version_string(self):
return self.version
def release_string(self):
return self.release
def canonical_version_string(self):
return self.version
def version_string_with_vcs(self):
return self.release
version_info = VersionInfo()
|
94b73811a4986dee5ac32fe1d91f377828a5bca5 | mnemosyne/app/__init__.py | mnemosyne/app/__init__.py | import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
# by_uuid API
# app.router.add_route('GET', '/applications', mnemosyne.applications.index)
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route('GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route('GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route('GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
|
import os
import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
class DirectoryIndex(aiohttp.web.StaticRoute):
def handle(self, request):
filename = request.match_info['filename']
if not filename:
filename = 'index.html'
elif filename.endswith('/'):
filename += 'index.html'
request.match_info['filename'] = filename
return super().handle(request)
public_dir = os.path.abspath(os.path.join(__file__, '../../../public'))
application.router.register_route(DirectoryIndex(None, '/', public_dir))
# by_uuid API
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route(
'GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route(
'GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route(
'GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
| Add static route serving files | Add static route serving files
Custom static file handler resolves `/` to `/index.html`.
| Python | agpl-3.0 | jgraichen/mnemosyne,jgraichen/mnemosyne,jgraichen/mnemosyne | import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
# by_uuid API
# app.router.add_route('GET', '/applications', mnemosyne.applications.index)
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route('GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route('GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route('GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
Add static route serving files
Custom static file handler resolves `/` to `/index.html`. |
import os
import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
class DirectoryIndex(aiohttp.web.StaticRoute):
def handle(self, request):
filename = request.match_info['filename']
if not filename:
filename = 'index.html'
elif filename.endswith('/'):
filename += 'index.html'
request.match_info['filename'] = filename
return super().handle(request)
public_dir = os.path.abspath(os.path.join(__file__, '../../../public'))
application.router.register_route(DirectoryIndex(None, '/', public_dir))
# by_uuid API
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route(
'GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route(
'GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route(
'GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
| <commit_before>import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
# by_uuid API
# app.router.add_route('GET', '/applications', mnemosyne.applications.index)
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route('GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route('GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route('GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
<commit_msg>Add static route serving files
Custom static file handler resolves `/` to `/index.html`.<commit_after> |
import os
import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
class DirectoryIndex(aiohttp.web.StaticRoute):
def handle(self, request):
filename = request.match_info['filename']
if not filename:
filename = 'index.html'
elif filename.endswith('/'):
filename += 'index.html'
request.match_info['filename'] = filename
return super().handle(request)
public_dir = os.path.abspath(os.path.join(__file__, '../../../public'))
application.router.register_route(DirectoryIndex(None, '/', public_dir))
# by_uuid API
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route(
'GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route(
'GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route(
'GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
| import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
# by_uuid API
# app.router.add_route('GET', '/applications', mnemosyne.applications.index)
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route('GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route('GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route('GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
Add static route serving files
Custom static file handler resolves `/` to `/index.html`.
import os
import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
class DirectoryIndex(aiohttp.web.StaticRoute):
def handle(self, request):
filename = request.match_info['filename']
if not filename:
filename = 'index.html'
elif filename.endswith('/'):
filename += 'index.html'
request.match_info['filename'] = filename
return super().handle(request)
public_dir = os.path.abspath(os.path.join(__file__, '../../../public'))
application.router.register_route(DirectoryIndex(None, '/', public_dir))
# by_uuid API
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route(
'GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route(
'GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route(
'GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
| <commit_before>import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
# by_uuid API
# app.router.add_route('GET', '/applications', mnemosyne.applications.index)
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route('GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route('GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route('GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
<commit_msg>Add static route serving files
Custom static file handler resolves `/` to `/index.html`.<commit_after>
import os
import aiohttp
import aiohttp.web
from mnemosyne.app import by_time, by_uuid
application = aiohttp.web.Application()
class DirectoryIndex(aiohttp.web.StaticRoute):
def handle(self, request):
filename = request.match_info['filename']
if not filename:
filename = 'index.html'
elif filename.endswith('/'):
filename += 'index.html'
request.match_info['filename'] = filename
return super().handle(request)
public_dir = os.path.abspath(os.path.join(__file__, '../../../public'))
application.router.register_route(DirectoryIndex(None, '/', public_dir))
# by_uuid API
application.router.add_route('GET', '/trace/{traceUuid}', by_uuid.getTrace)
application.router.add_route(
'GET', '/transaction/{transactionUuid}', by_uuid.getTransaction)
application.router.add_route(
'GET', '/application/{applicationUuid}', by_uuid.getApplication)
# by_time API
application.router.add_route(
'GET', '/traces/frontend/age/{ageInMin}', by_time.getFrontendTraces)
|
66c1bcdb242b30658d323832af04ee814432bdc9 | hackernews_scrapy/items.py | hackernews_scrapy/items.py | # -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
crawled_at = scrapy.Field()
| # -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
url = scrapy.Field()
| Add url field to HackernewsScrapyItem and remove "crawled_at" | Add url field to HackernewsScrapyItem and remove "crawled_at"
| Python | mit | mdsrosa/hackernews_scrapy | # -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
crawled_at = scrapy.Field()
Add url field to HackernewsScrapyItem and remove "crawled_at" | # -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
url = scrapy.Field()
| <commit_before># -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
crawled_at = scrapy.Field()
<commit_msg>Add url field to HackernewsScrapyItem and remove "crawled_at"<commit_after> | # -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
url = scrapy.Field()
| # -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
crawled_at = scrapy.Field()
Add url field to HackernewsScrapyItem and remove "crawled_at"# -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
url = scrapy.Field()
| <commit_before># -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
crawled_at = scrapy.Field()
<commit_msg>Add url field to HackernewsScrapyItem and remove "crawled_at"<commit_after># -*- coding: utf-8 -*-
import scrapy
class HackernewsScrapyItem(scrapy.Item):
title = scrapy.Field()
url = scrapy.Field()
|
11d4059cf5c66e6de648c675bb049825901479cf | code/array_map.py | code/array_map.py | arr = [1, 5, 10, 20]
print(*map(lambda num: num * 2, arr))
| arr = [1, 5, 10, 20]
print([num * 2 for num in arr])
| Use more consistent example for map | Use more consistent example for map
There is a `map` function in pythin, but for simple single expression
calculations, list comprehensions are much better suited.
While map works well if there is a function, you can pass.
| Python | mit | Evmorov/ruby-coffeescript,evmorov/lang-compare,evmorov/lang-compare,evmorov/lang-compare,evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare | arr = [1, 5, 10, 20]
print(*map(lambda num: num * 2, arr))
Use more consistent example for map
There is a `map` function in pythin, but for simple single expression
calculations, list comprehensions are much better suited.
While map works well if there is a function, you can pass. | arr = [1, 5, 10, 20]
print([num * 2 for num in arr])
| <commit_before>arr = [1, 5, 10, 20]
print(*map(lambda num: num * 2, arr))
<commit_msg>Use more consistent example for map
There is a `map` function in pythin, but for simple single expression
calculations, list comprehensions are much better suited.
While map works well if there is a function, you can pass.<commit_after> | arr = [1, 5, 10, 20]
print([num * 2 for num in arr])
| arr = [1, 5, 10, 20]
print(*map(lambda num: num * 2, arr))
Use more consistent example for map
There is a `map` function in pythin, but for simple single expression
calculations, list comprehensions are much better suited.
While map works well if there is a function, you can pass.arr = [1, 5, 10, 20]
print([num * 2 for num in arr])
| <commit_before>arr = [1, 5, 10, 20]
print(*map(lambda num: num * 2, arr))
<commit_msg>Use more consistent example for map
There is a `map` function in pythin, but for simple single expression
calculations, list comprehensions are much better suited.
While map works well if there is a function, you can pass.<commit_after>arr = [1, 5, 10, 20]
print([num * 2 for num in arr])
|
546ff329d4a792ddfb0576c78cf6d3e4f2321727 | scripts/build_profile_docs.py | scripts/build_profile_docs.py | #! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('descripiton', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
| #! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('description', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
| Fix typo in profile doc description build | Fix typo in profile doc description build
| Python | mit | PyCQA/isort,PyCQA/isort | #! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('descripiton', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
Fix typo in profile doc description build | #! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('description', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
| <commit_before>#! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('descripiton', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
<commit_msg>Fix typo in profile doc description build<commit_after> | #! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('description', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
| #! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('descripiton', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
Fix typo in profile doc description build#! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('description', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
| <commit_before>#! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('descripiton', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
<commit_msg>Fix typo in profile doc description build<commit_after>#! /bin/env python
import os
from typing import Any, Dict, Generator, Iterable, Type
from isort.profiles import profiles
OUTPUT_FILE = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "../docs/configuration/profiles.md")
)
HEADER = """Built-in Profile for isort
========
The following profiles are built into isort to allow easy interoperability with
common projects and code styles.
To use any of the listed profiles, use `isort --profile PROFILE_NAME` from the command line, or `profile=PROFILE_NAME` in your configuration file.
"""
def format_profile(profile_name: str, profile: Dict[str, Any]) -> str:
options = "\n".join(f" - **{name}**: `{repr(value)}`" for name, value in profile.items())
return f"""
#{profile_name}
{profile.get('description', '')}
{options}
"""
def document_text() -> str:
return f"{HEADER}{''.join(format_profile(profile_name, profile) for profile_name, profile in profiles.items())}"
def write_document():
with open(OUTPUT_FILE, "w") as output_file:
output_file.write(document_text())
if __name__ == "__main__":
write_document()
|
75f0db346adfcf53f71ff69aa61c163a84116e0e | seaworthy/tests/test_utils.py | seaworthy/tests/test_utils.py | from testtools.assertions import assert_that
from testtools.matchers import Equals
from seaworthy.utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
| from testtools.assertions import assert_that
from testtools.matchers import Equals
from ..utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
| Fix the import order lint error | Fix the import order lint error
| Python | bsd-3-clause | praekeltfoundation/seaworthy | from testtools.assertions import assert_that
from testtools.matchers import Equals
from seaworthy.utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
Fix the import order lint error | from testtools.assertions import assert_that
from testtools.matchers import Equals
from ..utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
| <commit_before>from testtools.assertions import assert_that
from testtools.matchers import Equals
from seaworthy.utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
<commit_msg>Fix the import order lint error<commit_after> | from testtools.assertions import assert_that
from testtools.matchers import Equals
from ..utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
| from testtools.assertions import assert_that
from testtools.matchers import Equals
from seaworthy.utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
Fix the import order lint errorfrom testtools.assertions import assert_that
from testtools.matchers import Equals
from ..utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
| <commit_before>from testtools.assertions import assert_that
from testtools.matchers import Equals
from seaworthy.utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
<commit_msg>Fix the import order lint error<commit_after>from testtools.assertions import assert_that
from testtools.matchers import Equals
from ..utils import resource_name
def test_resource_name():
# Dummy test so that pytest passes
assert_that(resource_name('foo'), Equals('test_foo'))
|
21ab1204c1cb35a5d9b95124040e160f4f5edabd | solitude/settings/__init__.py | solitude/settings/__init__.py | from local import *
| from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)' % exc.args[0]])
raise exc
| Revert "some random settings changes" | Revert "some random settings changes"
This reverts commit 640eb2be2e32413718e93c1b8c77279ab5152170.
| Python | bsd-3-clause | muffinresearch/solitude,muffinresearch/solitude | from local import *
Revert "some random settings changes"
This reverts commit 640eb2be2e32413718e93c1b8c77279ab5152170. | from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)' % exc.args[0]])
raise exc
| <commit_before>from local import *
<commit_msg>Revert "some random settings changes"
This reverts commit 640eb2be2e32413718e93c1b8c77279ab5152170.<commit_after> | from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)' % exc.args[0]])
raise exc
| from local import *
Revert "some random settings changes"
This reverts commit 640eb2be2e32413718e93c1b8c77279ab5152170.from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)' % exc.args[0]])
raise exc
| <commit_before>from local import *
<commit_msg>Revert "some random settings changes"
This reverts commit 640eb2be2e32413718e93c1b8c77279ab5152170.<commit_after>from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)' % exc.args[0]])
raise exc
|
481028f075bf46696b8adc5904663e97bc883c52 | notfound.py | notfound.py | from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
| from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
self.error(404)
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
| Return HTTP Status Code 404 for not found errors | Return HTTP Status Code 404 for not found errors
| Python | mit | mback2k/appengine-oauth-profile,mback2k/appengine-oauth-profile | from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
Return HTTP Status Code 404 for not found errors | from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
self.error(404)
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
| <commit_before>from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
<commit_msg>Return HTTP Status Code 404 for not found errors<commit_after> | from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
self.error(404)
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
| from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
Return HTTP Status Code 404 for not found errorsfrom google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
self.error(404)
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
| <commit_before>from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
<commit_msg>Return HTTP Status Code 404 for not found errors<commit_after>from google.appengine.ext.webapp import template
import webapp2
import os
class NotFound(webapp2.RequestHandler):
def get(self):
self.error(404)
path = os.path.join(os.path.dirname(__file__), 'templates/notfound.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/.*', NotFound)])
|
b77e2fa27e8e2cae133cc2bc0e2f130b999b83c5 | pythonFlaskStarter/app/welcome.py | pythonFlaskStarter/app/welcome.py | # Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the “License”);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an “AS IS” BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port)) | # Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port)) | Fix bad encoding in boilerplate | Fix bad encoding in boilerplate
| Python | apache-2.0 | javed120183/testingrepo,rvennam/starter-apps,javed120183/testingrepo,rvennam/starter-apps,rvennam/starter-apps,rvennam/starter-apps | # Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the “License”);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an “AS IS” BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port))Fix bad encoding in boilerplate | # Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port)) | <commit_before># Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the “License”);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an “AS IS” BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port))<commit_msg>Fix bad encoding in boilerplate<commit_after> | # Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port)) | # Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the “License”);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an “AS IS” BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port))Fix bad encoding in boilerplate# Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port)) | <commit_before># Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the “License”);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an “AS IS” BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port))<commit_msg>Fix bad encoding in boilerplate<commit_after># Copyright 2015 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def Welcome():
return app.send_static_file('index.html')
@app.route('/myapp')
def WelcomeToMyapp():
return 'Welcome again to my app running on Bluemix!'
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port)) |
91d63e50df5bc8b9fe7d98b28efd541bafd0bc08 | blankspot/node_registration/urls.py | blankspot/node_registration/urls.py | from django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
| from django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
| Replace \t by spaces for indention | Replace \t by spaces for indention
| Python | agpl-3.0 | frlan/blankspot | from django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
Replace \t by spaces for indention | from django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
| <commit_before>from django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
<commit_msg>Replace \t by spaces for indention<commit_after> | from django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
| from django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
Replace \t by spaces for indentionfrom django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
| <commit_before>from django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
<commit_msg>Replace \t by spaces for indention<commit_after>from django.conf.urls import patterns, url
from node_registration import views
urlpatterns = patterns('',
url(r'^add/$', views.PositionCreate.as_view(), name='position-add'),
url(r'^list', views.ListPosition.as_view(), name='position-list')
)
|
a136f7046b8df661713d3bcf6a7681894210def2 | ricker/__init__.py | ricker/__init__.py | """
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_import | """
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_import
from .ricker import ricker
| Make ricker funciton available in the top level. | Make ricker funciton available in the top level.
| Python | mit | gatechzhu/ricker | """
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_importMake ricker funciton available in the top level. | """
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_import
from .ricker import ricker
| <commit_before>"""
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_import<commit_msg>Make ricker funciton available in the top level.<commit_after> | """
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_import
from .ricker import ricker
| """
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_importMake ricker funciton available in the top level."""
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_import
from .ricker import ricker
| <commit_before>"""
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_import<commit_msg>Make ricker funciton available in the top level.<commit_after>"""
Ricker wavelet generator for seismic simulation
===============================================
"""
from __future__ import division, print_function, absolute_import
from .ricker import ricker
|
5307e9d879a5432db5f54fd61ea0060b6526a1a6 | sundaytasks/example/test_plugin.py | sundaytasks/example/test_plugin.py | from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
#print("plugin:",plugin['receiver'])
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
| from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
| Clear old method of calling plugins | Clear old method of calling plugins
| Python | apache-2.0 | olafura/sundaytasks-py | from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
#print("plugin:",plugin['receiver'])
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
Clear old method of calling plugins | from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
| <commit_before>from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
#print("plugin:",plugin['receiver'])
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
<commit_msg>Clear old method of calling plugins<commit_after> | from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
| from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
#print("plugin:",plugin['receiver'])
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
Clear old method of calling pluginsfrom tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
| <commit_before>from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
#print("plugin:",plugin['receiver'])
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
<commit_msg>Clear old method of calling plugins<commit_after>from tornado import gen, ioloop
from tornado.ioloop import IOLoop
import sys
from pkg_resources import iter_entry_points
import json
@gen.coroutine
def main(plugin):
response = yield plugin['receiver']("Prufa")
print("Results: \n%s" % json.dumps(response, sort_keys=True,
indent=4, separators=(',', ': ')))
if __name__ == "__main__":
if(len(sys.argv) > 1):
iplugin = __import__("%s" % sys.argv[1])
plugin = iplugin.plugin
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
else:
for object in iter_entry_points(group='sundaytasks.plugin', name=None):
print object.name
plugin = object.load()
instance = IOLoop.instance()
instance.add_callback(callback=lambda: main(plugin))
instance.start()
|
f50ef6d331afa5a55467a104bc307edbdb2cd650 | tests/test_auth.py | tests/test_auth.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
AuthClient.BASE_URL.format('http://localhost', 'v1'),
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import uuid
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.authentication import CredentialsConfig
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1')
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
@httpretty.activate
def test_login(self):
token = uuid.uuid4().hex
httpretty.register_uri(
httpretty.POST,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT,
'token'
),
body=json.dumps({'token': token}),
content_type='application/json', status=200)
credentials = CredentialsConfig('user', 'password')
assert token == self.client.login(credentials=credentials)
| Fix auth tests and add login test | Fix auth tests and add login test
| Python | apache-2.0 | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
AuthClient.BASE_URL.format('http://localhost', 'v1'),
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
Fix auth tests and add login test | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import uuid
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.authentication import CredentialsConfig
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1')
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
@httpretty.activate
def test_login(self):
token = uuid.uuid4().hex
httpretty.register_uri(
httpretty.POST,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT,
'token'
),
body=json.dumps({'token': token}),
content_type='application/json', status=200)
credentials = CredentialsConfig('user', 'password')
assert token == self.client.login(credentials=credentials)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
AuthClient.BASE_URL.format('http://localhost', 'v1'),
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
<commit_msg>Fix auth tests and add login test<commit_after> | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import uuid
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.authentication import CredentialsConfig
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1')
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
@httpretty.activate
def test_login(self):
token = uuid.uuid4().hex
httpretty.register_uri(
httpretty.POST,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT,
'token'
),
body=json.dumps({'token': token}),
content_type='application/json', status=200)
credentials = CredentialsConfig('user', 'password')
assert token == self.client.login(credentials=credentials)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
AuthClient.BASE_URL.format('http://localhost', 'v1'),
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
Fix auth tests and add login test# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import uuid
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.authentication import CredentialsConfig
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1')
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
@httpretty.activate
def test_login(self):
token = uuid.uuid4().hex
httpretty.register_uri(
httpretty.POST,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT,
'token'
),
body=json.dumps({'token': token}),
content_type='application/json', status=200)
credentials = CredentialsConfig('user', 'password')
assert token == self.client.login(credentials=credentials)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
AuthClient.BASE_URL.format('http://localhost', 'v1'),
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
<commit_msg>Fix auth tests and add login test<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import uuid
from unittest import TestCase
import httpretty
from faker import Faker
from polyaxon_schemas.authentication import CredentialsConfig
from polyaxon_schemas.user import UserConfig
from polyaxon_client.auth import AuthClient
faker = Faker()
class TestAuthClient(TestCase):
def setUp(self):
self.client = AuthClient(host='http://localhost', version='v1', token=faker.uuid4())
self.base_url = AuthClient.BASE_URL.format('http://localhost', 'v1')
@httpretty.activate
def test_get_user(self):
user = UserConfig('user', 'user@test.com').to_dict()
httpretty.register_uri(
httpretty.GET,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT),
body=json.dumps(user),
content_type='application/json', status=200)
user_result = self.client.get_user()
assert user == user_result.to_dict()
@httpretty.activate
def test_login(self):
token = uuid.uuid4().hex
httpretty.register_uri(
httpretty.POST,
AuthClient._build_url(
self.base_url,
AuthClient.ENDPOINT,
'token'
),
body=json.dumps({'token': token}),
content_type='application/json', status=200)
credentials = CredentialsConfig('user', 'password')
assert token == self.client.login(credentials=credentials)
|
89bec483ce88fb1a310d4dd06220ace412148257 | tests/test_auth.py | tests/test_auth.py | from __future__ import absolute_import
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = raw_input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = raw_input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
| from __future__ import absolute_import
import random
import unittest
from six.moves import input
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
| Update auth tests to be compatible with Python 3 | Update auth tests to be compatible with Python 3
| Python | mit | svven/tweepy,tweepy/tweepy | from __future__ import absolute_import
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = raw_input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = raw_input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
Update auth tests to be compatible with Python 3 | from __future__ import absolute_import
import random
import unittest
from six.moves import input
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
| <commit_before>from __future__ import absolute_import
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = raw_input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = raw_input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
<commit_msg>Update auth tests to be compatible with Python 3<commit_after> | from __future__ import absolute_import
import random
import unittest
from six.moves import input
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
| from __future__ import absolute_import
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = raw_input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = raw_input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
Update auth tests to be compatible with Python 3from __future__ import absolute_import
import random
import unittest
from six.moves import input
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
| <commit_before>from __future__ import absolute_import
import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = raw_input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = raw_input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
<commit_msg>Update auth tests to be compatible with Python 3<commit_after>from __future__ import absolute_import
import random
import unittest
from six.moves import input
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
|
d604128015826444be4585c7204030840e9efc88 | tests/test_java.py | tests/test_java.py | def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
| def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
def test_java_certs_exist(File):
assert File("/etc/ssl/certs/java/cacerts").exists
| Add test to make sure SSL certs are installed. | Add test to make sure SSL certs are installed.
| Python | apache-2.0 | azavea/ansible-java,flibbertigibbet/ansible-java | def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
Add test to make sure SSL certs are installed. | def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
def test_java_certs_exist(File):
assert File("/etc/ssl/certs/java/cacerts").exists
| <commit_before>def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
<commit_msg>Add test to make sure SSL certs are installed.<commit_after> | def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
def test_java_certs_exist(File):
assert File("/etc/ssl/certs/java/cacerts").exists
| def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
Add test to make sure SSL certs are installed.def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
def test_java_certs_exist(File):
assert File("/etc/ssl/certs/java/cacerts").exists
| <commit_before>def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
<commit_msg>Add test to make sure SSL certs are installed.<commit_after>def test_java_exists(Command):
version_result = Command("java -version")
assert version_result.rc == 0
def test_java_certs_exist(File):
assert File("/etc/ssl/certs/java/cacerts").exists
|
1db05fb528295456e2127be5ba5225d697655676 | metashare/accounts/urls.py | metashare/accounts/urls.py | from django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'add_default_editor_groups/$',
'add_default_editor_groups'),
(r'remove_default_editor_groups/$',
'remove_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
| from django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'update_default_editor_groups/$',
'update_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
| Manage default editor group on a single page | Manage default editor group on a single page
| Python | bsd-3-clause | zeehio/META-SHARE,MiltosD/CEF-ELRC,zeehio/META-SHARE,MiltosD/CEFELRC,MiltosD/CEF-ELRC,zeehio/META-SHARE,MiltosD/CEFELRC,MiltosD/CEF-ELRC,MiltosD/CEF-ELRC,JuliBakagianni/CEF-ELRC,MiltosD/CEFELRC,MiltosD/CEFELRC,zeehio/META-SHARE,JuliBakagianni/META-SHARE,JuliBakagianni/CEF-ELRC,JuliBakagianni/META-SHARE,JuliBakagianni/CEF-ELRC,zeehio/META-SHARE,MiltosD/CEF-ELRC,JuliBakagianni/META-SHARE,JuliBakagianni/CEF-ELRC,zeehio/META-SHARE,JuliBakagianni/META-SHARE,JuliBakagianni/CEF-ELRC,MiltosD/CEFELRC,zeehio/META-SHARE,JuliBakagianni/CEF-ELRC,JuliBakagianni/META-SHARE,MiltosD/CEFELRC,JuliBakagianni/CEF-ELRC,JuliBakagianni/META-SHARE,JuliBakagianni/META-SHARE,MiltosD/CEF-ELRC,MiltosD/CEF-ELRC,MiltosD/CEFELRC | from django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'add_default_editor_groups/$',
'add_default_editor_groups'),
(r'remove_default_editor_groups/$',
'remove_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
Manage default editor group on a single page | from django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'update_default_editor_groups/$',
'update_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
| <commit_before>from django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'add_default_editor_groups/$',
'add_default_editor_groups'),
(r'remove_default_editor_groups/$',
'remove_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
<commit_msg>Manage default editor group on a single page<commit_after> | from django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'update_default_editor_groups/$',
'update_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
| from django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'add_default_editor_groups/$',
'add_default_editor_groups'),
(r'remove_default_editor_groups/$',
'remove_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
Manage default editor group on a single pagefrom django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'update_default_editor_groups/$',
'update_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
| <commit_before>from django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'add_default_editor_groups/$',
'add_default_editor_groups'),
(r'remove_default_editor_groups/$',
'remove_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
<commit_msg>Manage default editor group on a single page<commit_after>from django.conf.urls.defaults import patterns
from metashare.settings import DJANGO_BASE
urlpatterns = patterns('metashare.accounts.views',
(r'create/$',
'create'),
(r'confirm/(?P<uuid>[0-9a-f]{32})/$',
'confirm'),
(r'contact/$',
'contact'),
(r'reset/(?:(?P<uuid>[0-9a-f]{32})/)?$',
'reset'),
(r'profile/$',
'edit_profile'),
(r'editor_group_application/$',
'editor_group_application'),
(r'organization_application/$',
'organization_application'),
(r'update_default_editor_groups/$',
'update_default_editor_groups'),
)
urlpatterns += patterns('django.contrib.auth.views',
(r'^profile/change_password/$', 'password_change',
{'post_change_redirect' : '/{0}accounts/profile/change_password/done/'.format(DJANGO_BASE), 'template_name': 'accounts/change_password.html'}),
(r'^profile/change_password/done/$', 'password_change_done',
{'template_name': 'accounts/change_password_done.html'}),
)
|
29978337158d06c6c761294fd1e3c5c54de847ae | src/webassets/filter/uglifyjs.py | src/webassets/filter/uglifyjs.py | """Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifySFilter',)
class UglifySFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
| """Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
| Fix misspelled UglifyJS filter class name | Fix misspelled UglifyJS filter class name
| Python | bsd-2-clause | scorphus/webassets,heynemann/webassets,glorpen/webassets,john2x/webassets,JDeuce/webassets,wijerasa/webassets,glorpen/webassets,0x1997/webassets,aconrad/webassets,JDeuce/webassets,aconrad/webassets,john2x/webassets,heynemann/webassets,florianjacob/webassets,wijerasa/webassets,glorpen/webassets,aconrad/webassets,heynemann/webassets,scorphus/webassets,0x1997/webassets,florianjacob/webassets | """Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifySFilter',)
class UglifySFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
Fix misspelled UglifyJS filter class name | """Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
| <commit_before>"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifySFilter',)
class UglifySFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
<commit_msg>Fix misspelled UglifyJS filter class name<commit_after> | """Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
| """Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifySFilter',)
class UglifySFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
Fix misspelled UglifyJS filter class name"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
| <commit_before>"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifySFilter',)
class UglifySFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
<commit_msg>Fix misspelled UglifyJS filter class name<commit_after>"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
|
722228a023aca35660bc493b812727f6c665b3cb | posts.py | posts.py | import json
import pprint
import requests
def sample_valid_reddit_response():
r = requests.get('http://www.reddit.com/r/cscareerquestions/top.json')
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
| import json
import pprint
import requests
SAMPLE_REDDIT_URL = 'http://www.reddit.com/r/cscareerquestions/top.json'
def sample_valid_reddit_response():
r = requests.get(SAMPLE_REDDIT_URL)
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
| Make reddit url a constant | Make reddit url a constant
| Python | mit | RossCarriga/repost-data | import json
import pprint
import requests
def sample_valid_reddit_response():
r = requests.get('http://www.reddit.com/r/cscareerquestions/top.json')
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
Make reddit url a constant | import json
import pprint
import requests
SAMPLE_REDDIT_URL = 'http://www.reddit.com/r/cscareerquestions/top.json'
def sample_valid_reddit_response():
r = requests.get(SAMPLE_REDDIT_URL)
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
| <commit_before>import json
import pprint
import requests
def sample_valid_reddit_response():
r = requests.get('http://www.reddit.com/r/cscareerquestions/top.json')
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
<commit_msg>Make reddit url a constant<commit_after> | import json
import pprint
import requests
SAMPLE_REDDIT_URL = 'http://www.reddit.com/r/cscareerquestions/top.json'
def sample_valid_reddit_response():
r = requests.get(SAMPLE_REDDIT_URL)
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
| import json
import pprint
import requests
def sample_valid_reddit_response():
r = requests.get('http://www.reddit.com/r/cscareerquestions/top.json')
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
Make reddit url a constantimport json
import pprint
import requests
SAMPLE_REDDIT_URL = 'http://www.reddit.com/r/cscareerquestions/top.json'
def sample_valid_reddit_response():
r = requests.get(SAMPLE_REDDIT_URL)
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
| <commit_before>import json
import pprint
import requests
def sample_valid_reddit_response():
r = requests.get('http://www.reddit.com/r/cscareerquestions/top.json')
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
<commit_msg>Make reddit url a constant<commit_after>import json
import pprint
import requests
SAMPLE_REDDIT_URL = 'http://www.reddit.com/r/cscareerquestions/top.json'
def sample_valid_reddit_response():
r = requests.get(SAMPLE_REDDIT_URL)
response_json = r.json()
if 'data' not in response_json:
print("Trying again")
response_json = sample_valid_reddit_response()
return response_json
def save_sample():
response_json = sample_valid_reddit_response()
del response_json['data']['children']
with open('sample_response.json', 'w+') as f:
json.dump(response_json, f, indent=5)
def get_next_reddit_response():
response = {}
with open('sample_response.json', 'r') as f:
response = json.load(f)
after = response['data']['after']
print(after)
if '__main__' == __name__:
get_next_reddit_response()
|
d3d5c0c6d13b6cf84b8a7e12e40e9740ca960529 | spillway/mixins.py | spillway/mixins.py | from rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data,
self.request.FILES or None)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
| from rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
| Use request.data to access file uploads | Use request.data to access file uploads
| Python | bsd-3-clause | kuzmich/django-spillway,barseghyanartur/django-spillway,bkg/django-spillway | from rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data,
self.request.FILES or None)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
Use request.data to access file uploads | from rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
| <commit_before>from rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data,
self.request.FILES or None)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
<commit_msg>Use request.data to access file uploads<commit_after> | from rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
| from rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data,
self.request.FILES or None)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
Use request.data to access file uploadsfrom rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
| <commit_before>from rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data,
self.request.FILES or None)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
<commit_msg>Use request.data to access file uploads<commit_after>from rest_framework.exceptions import ValidationError
class ModelSerializerMixin(object):
"""Provides generic model serializer classes to views."""
model_serializer_class = None
def get_serializer_class(self):
if self.serializer_class:
return self.serializer_class
class DefaultSerializer(self.model_serializer_class):
class Meta:
model = self.queryset.model
return DefaultSerializer
class QueryFormMixin(object):
"""Provides form based handling of GET or POST requests."""
query_form_class = None
def clean_params(self):
"""Returns a validated form dict from Request parameters."""
form = self.query_form_class(
self.request.query_params or self.request.data)
if form.is_valid():
return form.cleaned_data
raise ValidationError(form.errors)
|
7a5cb8ba82b79372226f9ac4ba3a71e4209cdd72 | sheldon/storage.py | sheldon/storage.py | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
class Storage:
pass | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
| Create init of Storage class | Create init of Storage class
| Python | mit | lises/sheldon | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
class Storage:
passCreate init of Storage class | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
| <commit_before># -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
class Storage:
pass<commit_msg>Create init of Storage class<commit_after> | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
| # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
class Storage:
passCreate init of Storage class# -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
| <commit_before># -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
class Storage:
pass<commit_msg>Create init of Storage class<commit_after># -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
return
|
7cc8699f7100cfc969b1b76efbcc47e1fafb2363 | paiji2_shoutbox/models.py | paiji2_shoutbox/models.py | from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
)
def save(self, *args, **kwargs):
if self.pk is None:
self.posted_at = now()
super(Note, self).save(*args, **kwargs)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
| from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
auto_now_add=True,
)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
| Remove save method for auto_now_add=True | Remove save method for auto_now_add=True
| Python | agpl-3.0 | rezometz/django-paiji2-shoutbox,rezometz/django-paiji2-shoutbox | from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
)
def save(self, *args, **kwargs):
if self.pk is None:
self.posted_at = now()
super(Note, self).save(*args, **kwargs)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
Remove save method for auto_now_add=True | from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
auto_now_add=True,
)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
| <commit_before>from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
)
def save(self, *args, **kwargs):
if self.pk is None:
self.posted_at = now()
super(Note, self).save(*args, **kwargs)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
<commit_msg>Remove save method for auto_now_add=True<commit_after> | from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
auto_now_add=True,
)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
| from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
)
def save(self, *args, **kwargs):
if self.pk is None:
self.posted_at = now()
super(Note, self).save(*args, **kwargs)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
Remove save method for auto_now_add=Truefrom django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
auto_now_add=True,
)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
| <commit_before>from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
)
def save(self, *args, **kwargs):
if self.pk is None:
self.posted_at = now()
super(Note, self).save(*args, **kwargs)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
<commit_msg>Remove save method for auto_now_add=True<commit_after>from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
from django.contrib.auth.models import User
class Note(models.Model):
author = models.ForeignKey(
User,
verbose_name=_('author'),
related_name='notes',
)
message = models.CharField(
_('message'),
max_length=200,
)
posted_at = models.DateTimeField(
_('publication date'),
auto_now_add=True,
)
class Meta:
verbose_name = _('note')
verbose_name_plural = _('notes')
ordering = ('-posted_at', )
|
f4a73fcc591d877003e9963f087d2473568bfa9d | python/ql/test/experimental/query-tests/Security/CWE-079/sendgrid_via_mail_send_post_request_body_bad.py | python/ql/test/experimental/query-tests/Security/CWE-079/sendgrid_via_mail_send_post_request_body_bad.py | # This tests that the developer doesn't pass tainted user data into the mail.send.post() method in the SendGrid library.
import sendgrid
import os
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html><p>Hello, world!</p><img src=[CID GOES HERE]></img></html>"
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<p>Thanks</br>The SendGrid Team</p>",
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "If you would like to unsubscribe and stop receiving these emails <% clickhere %>.",
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
| import sendgrid
import os
from flask import request, Flask
app = Flask(__name__)
@app.route("/sendgrid")
def send():
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html>{}</html>".format(request.args["html_content"])
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_footer"]),
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_tracking"]),
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
| Add RFS to `sendgrid` test | Add RFS to `sendgrid` test
| Python | mit | github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql | # This tests that the developer doesn't pass tainted user data into the mail.send.post() method in the SendGrid library.
import sendgrid
import os
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html><p>Hello, world!</p><img src=[CID GOES HERE]></img></html>"
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<p>Thanks</br>The SendGrid Team</p>",
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "If you would like to unsubscribe and stop receiving these emails <% clickhere %>.",
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
Add RFS to `sendgrid` test | import sendgrid
import os
from flask import request, Flask
app = Flask(__name__)
@app.route("/sendgrid")
def send():
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html>{}</html>".format(request.args["html_content"])
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_footer"]),
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_tracking"]),
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
| <commit_before># This tests that the developer doesn't pass tainted user data into the mail.send.post() method in the SendGrid library.
import sendgrid
import os
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html><p>Hello, world!</p><img src=[CID GOES HERE]></img></html>"
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<p>Thanks</br>The SendGrid Team</p>",
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "If you would like to unsubscribe and stop receiving these emails <% clickhere %>.",
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
<commit_msg>Add RFS to `sendgrid` test<commit_after> | import sendgrid
import os
from flask import request, Flask
app = Flask(__name__)
@app.route("/sendgrid")
def send():
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html>{}</html>".format(request.args["html_content"])
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_footer"]),
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_tracking"]),
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
| # This tests that the developer doesn't pass tainted user data into the mail.send.post() method in the SendGrid library.
import sendgrid
import os
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html><p>Hello, world!</p><img src=[CID GOES HERE]></img></html>"
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<p>Thanks</br>The SendGrid Team</p>",
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "If you would like to unsubscribe and stop receiving these emails <% clickhere %>.",
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
Add RFS to `sendgrid` testimport sendgrid
import os
from flask import request, Flask
app = Flask(__name__)
@app.route("/sendgrid")
def send():
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html>{}</html>".format(request.args["html_content"])
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_footer"]),
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_tracking"]),
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
| <commit_before># This tests that the developer doesn't pass tainted user data into the mail.send.post() method in the SendGrid library.
import sendgrid
import os
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html><p>Hello, world!</p><img src=[CID GOES HERE]></img></html>"
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<p>Thanks</br>The SendGrid Team</p>",
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "If you would like to unsubscribe and stop receiving these emails <% clickhere %>.",
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
<commit_msg>Add RFS to `sendgrid` test<commit_after>import sendgrid
import os
from flask import request, Flask
app = Flask(__name__)
@app.route("/sendgrid")
def send():
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html>{}</html>".format(request.args["html_content"])
}
],
"from": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_footer"]),
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "sam.smith@example.com",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_tracking"]),
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
|
00a05d86e83f95ecab589313459212a6d6ec4355 | setup.py | setup.py | from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.5.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
| from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.6.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
| Update `curio` pin to 0.6.0. | Update `curio` pin to 0.6.0.
Signed-off-by: Laura <07c342be6e560e7f43842e2e21b774e61d85f047@veriny.tf>
| Python | mit | SunDwarf/curious | from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.5.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
Update `curio` pin to 0.6.0.
Signed-off-by: Laura <07c342be6e560e7f43842e2e21b774e61d85f047@veriny.tf> | from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.6.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
| <commit_before>from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.5.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
<commit_msg>Update `curio` pin to 0.6.0.
Signed-off-by: Laura <07c342be6e560e7f43842e2e21b774e61d85f047@veriny.tf><commit_after> | from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.6.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
| from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.5.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
Update `curio` pin to 0.6.0.
Signed-off-by: Laura <07c342be6e560e7f43842e2e21b774e61d85f047@veriny.tf>from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.6.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
| <commit_before>from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.5.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
<commit_msg>Update `curio` pin to 0.6.0.
Signed-off-by: Laura <07c342be6e560e7f43842e2e21b774e61d85f047@veriny.tf><commit_after>from setuptools import setup
setup(
name='discord-curious',
version='0.2.0.post1',
packages=['curious', 'curious.core', 'curious.http', 'curious.commands', 'curious.dataclasses', 'curious.voice',
'curious.ext.loapi', 'curious.ext.paginator'],
url='https://github.com/SunDwarf/curious',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='A curio library for the Discord API',
install_requires=[
"cuiows>=0.1.10",
"curio==0.6.0",
"h11==0.7.0",
"multidict==2.1.4",
"pylru==1.0.9",
"yarl==0.8.1",
],
extras_require={
"voice": ["opuslib==1.1.0",
"PyNaCL==1.0.1",]
}
)
|
bace6c5562b8c085858824168ba3ed4bf73fe3ae | setup.py | setup.py | #! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
| #! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
install_requires = [
'pyparsing',
'six',
],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
| Add missing dependency on pyparsing and six | Add missing dependency on pyparsing and six
Signed-off-by: Christophe Vu-Brugier <1930e27f67e1e10d51770b88cb06d386f1aa46ae@fastmail.fm>
| Python | apache-2.0 | agrover/configshell-fb,cvubrugier/configshell-fb | #! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
Add missing dependency on pyparsing and six
Signed-off-by: Christophe Vu-Brugier <1930e27f67e1e10d51770b88cb06d386f1aa46ae@fastmail.fm> | #! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
install_requires = [
'pyparsing',
'six',
],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
| <commit_before>#! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
<commit_msg>Add missing dependency on pyparsing and six
Signed-off-by: Christophe Vu-Brugier <1930e27f67e1e10d51770b88cb06d386f1aa46ae@fastmail.fm><commit_after> | #! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
install_requires = [
'pyparsing',
'six',
],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
| #! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
Add missing dependency on pyparsing and six
Signed-off-by: Christophe Vu-Brugier <1930e27f67e1e10d51770b88cb06d386f1aa46ae@fastmail.fm>#! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
install_requires = [
'pyparsing',
'six',
],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
| <commit_before>#! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
<commit_msg>Add missing dependency on pyparsing and six
Signed-off-by: Christophe Vu-Brugier <1930e27f67e1e10d51770b88cb06d386f1aa46ae@fastmail.fm><commit_after>#! /usr/bin/env python
'''
This file is part of ConfigShell.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
from setuptools import setup
setup(
name = 'configshell-fb',
version = '1.1.23',
description = 'A framework to implement simple but nice CLIs.',
license = 'Apache 2.0',
maintainer = 'Andy Grover',
maintainer_email = 'agrover@redhat.com',
url = 'http://github.com/open-iscsi/configshell-fb',
packages = ['configshell', 'configshell_fb'],
install_requires = [
'pyparsing',
'six',
],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
],
)
|
6fed8b08e280b88a491ca6c04e0a2c429e7f493f | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking',],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking', 'django_banking.models'],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| Include 'models' subdirectory in package | Include 'models' subdirectory in package
| Python | bsd-3-clause | headcr4sh/django-banking | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking',],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Include 'models' subdirectory in package | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking', 'django_banking.models'],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking',],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Include 'models' subdirectory in package<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking', 'django_banking.models'],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking',],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Include 'models' subdirectory in package#!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking', 'django_banking.models'],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking',],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Include 'models' subdirectory in package<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.core import setup
setup(
name='django-banking',
version='0.1-dev',
description='Banking (SWIFT) classes for Python/Django',
long_description=open('README').read(),
author='Benjamin P. Jung',
author_email='headcr4sh@gmail.com',
url='https://github.com/headcr4sh/dango-banking',
download_url='https://github.com/headcr4sh/django-banking/downloads/',
packages = ['django_banking', 'django_banking.models'],
license='BSD',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
7375a9c8adbc14932af2638cf1067c379457da48 | setup.py | setup.py | """
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=[
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
],
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
| """
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
install_requires = [
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
]
try:
import importlib
except ImportError:
install_requires.append('importlib')
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=install_requires,
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
| Add importlib to install_requires if necessary | Add importlib to install_requires if necessary
| Python | bsd-3-clause | ema/nubo | """
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=[
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
],
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
Add importlib to install_requires if necessary | """
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
install_requires = [
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
]
try:
import importlib
except ImportError:
install_requires.append('importlib')
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=install_requires,
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
| <commit_before>"""
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=[
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
],
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
<commit_msg>Add importlib to install_requires if necessary<commit_after> | """
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
install_requires = [
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
]
try:
import importlib
except ImportError:
install_requires.append('importlib')
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=install_requires,
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
| """
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=[
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
],
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
Add importlib to install_requires if necessary"""
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
install_requires = [
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
]
try:
import importlib
except ImportError:
install_requires.append('importlib')
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=install_requires,
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
| <commit_before>"""
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=[
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
],
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
<commit_msg>Add importlib to install_requires if necessary<commit_after>"""
nubo
----
An easy way to deploy Linux VMs on different cloud providers.
Links
`````
* `GitHub Repository <https://github.com/ema/nubo>`_
* `Development Version
<http://github.com/ema/nubo/zipball/master#egg=nubo-dev>`_
"""
from setuptools import setup
install_requires = [
'setuptools',
'apache-libcloud',
'paramiko',
'texttable'
]
try:
import importlib
except ImportError:
install_requires.append('importlib')
setup(
name='nubo',
version='0.4',
url='http://pythonhosted.org/nubo',
license='BSD',
author='Emanuele Rocca',
author_email='ema@linux.it',
description='Virtual Machine deployments on multiple cloud providers',
long_description=__doc__,
install_requires=install_requires,
packages=['nubo', 'nubo.clouds'],
scripts=['scripts/nubo'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: Internet',
'Topic :: System',
],
keywords='cloud vm startup devops ec2 rackspace',
)
|
d237dd2c68ed083d65d69b31d0a1905262a9edca | setup.py | setup.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import os
from Cython.Build import cythonize
if (os.name == "nt"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import platform
from Cython.Build import cythonize
os_name = platform.system()
if (os_name == "Windows"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
if os_name == 'Darwin':
compile_args += ['-stdlib=libc++']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
| Add compile arg for building on MacOS Catalina / Xcode 11.2 | Add compile arg for building on MacOS Catalina / Xcode 11.2
| Python | bsd-3-clause | sot/Chandra.Time,sot/Chandra.Time,sot/Chandra.Time | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import os
from Cython.Build import cythonize
if (os.name == "nt"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
Add compile arg for building on MacOS Catalina / Xcode 11.2 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import platform
from Cython.Build import cythonize
os_name = platform.system()
if (os_name == "Windows"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
if os_name == 'Darwin':
compile_args += ['-stdlib=libc++']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import os
from Cython.Build import cythonize
if (os.name == "nt"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
<commit_msg>Add compile arg for building on MacOS Catalina / Xcode 11.2<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import platform
from Cython.Build import cythonize
os_name = platform.system()
if (os_name == "Windows"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
if os_name == 'Darwin':
compile_args += ['-stdlib=libc++']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import os
from Cython.Build import cythonize
if (os.name == "nt"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
Add compile arg for building on MacOS Catalina / Xcode 11.2# Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import platform
from Cython.Build import cythonize
os_name = platform.system()
if (os_name == "Windows"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
if os_name == 'Darwin':
compile_args += ['-stdlib=libc++']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import os
from Cython.Build import cythonize
if (os.name == "nt"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
<commit_msg>Add compile arg for building on MacOS Catalina / Xcode 11.2<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
from setuptools import setup, Extension
import platform
from Cython.Build import cythonize
os_name = platform.system()
if (os_name == "Windows"):
compile_args = ['/EHs', '/D_CRT_SECURE_NO_DEPRECATE']
else:
compile_args = ['-Wno-switch-enum', '-Wno-switch', '-Wno-switch-default',
'-Wno-deprecated', '-Wno-parentheses']
if os_name == 'Darwin':
compile_args += ['-stdlib=libc++']
extensions = [Extension("*", ["Chandra/Time/_axTime3.pyx"],
extra_compile_args=compile_args)]
try:
from testr.setup_helper import cmdclass
except ImportError:
cmdclass = {}
setup(name='Chandra.Time',
author='Tom Aldcroft',
description='Convert between various time formats relevant to Chandra',
author_email='taldcroft@cfa.harvard.edu',
use_scm_version=True,
setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
zip_safe=False,
packages=['Chandra', 'Chandra.Time', 'Chandra.Time.tests'],
ext_modules=cythonize(extensions),
tests_require=['pytest'],
cmdclass=cmdclass,
)
|
0ae34253829e0d51049edf5f7d270b404bc22354 | setup.py | setup.py | import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses '
'email verification instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
| import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
| Remove reference to email from short description | Remove reference to email from short description
| Python | mit | relekang/django-nopassword,smajda/django-nopassword,relekang/django-nopassword,mjumbewu/django-nopassword,smajda/django-nopassword,mjumbewu/django-nopassword | import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses '
'email verification instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
Remove reference to email from short description | import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
| <commit_before>import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses '
'email verification instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
<commit_msg>Remove reference to email from short description<commit_after> | import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
| import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses '
'email verification instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
Remove reference to email from short descriptionimport os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
| <commit_before>import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses '
'email verification instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
<commit_msg>Remove reference to email from short description<commit_after>import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.1.0',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio>=3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
78a596ba34a3a8a7435dd6ca997e6b6cb79fbdd6 | setup.py | setup.py | #!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'neocommon',
'requests',
'feedparser',
'lxml',
'setproctitle',
'pyyaml',
'arrow'
]
)
| #!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'arrow',
'croniter',
'feedparser',
'lxml',
'neocommon',
'pyyaml',
'requests',
'setproctitle',
]
)
| Add croniter dependency. Sort deps. | Add croniter dependency. Sort deps.
| Python | apache-2.0 | GeoscienceAustralia/fetch,GeoscienceAustralia/fetch | #!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'neocommon',
'requests',
'feedparser',
'lxml',
'setproctitle',
'pyyaml',
'arrow'
]
)
Add croniter dependency. Sort deps. | #!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'arrow',
'croniter',
'feedparser',
'lxml',
'neocommon',
'pyyaml',
'requests',
'setproctitle',
]
)
| <commit_before>#!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'neocommon',
'requests',
'feedparser',
'lxml',
'setproctitle',
'pyyaml',
'arrow'
]
)
<commit_msg>Add croniter dependency. Sort deps.<commit_after> | #!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'arrow',
'croniter',
'feedparser',
'lxml',
'neocommon',
'pyyaml',
'requests',
'setproctitle',
]
)
| #!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'neocommon',
'requests',
'feedparser',
'lxml',
'setproctitle',
'pyyaml',
'arrow'
]
)
Add croniter dependency. Sort deps.#!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'arrow',
'croniter',
'feedparser',
'lxml',
'neocommon',
'pyyaml',
'requests',
'setproctitle',
]
)
| <commit_before>#!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'neocommon',
'requests',
'feedparser',
'lxml',
'setproctitle',
'pyyaml',
'arrow'
]
)
<commit_msg>Add croniter dependency. Sort deps.<commit_after>#!/usr/bin/env python2.7
from __future__ import print_function
from distutils.core import setup
import os
version = '1.0.0b'
# Append TeamCity build number if it gives us one.
if 'TC_BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['TC_BUILD_NUMBER']
setup(name='fetch',
maintainer='Jeremy Hooke',
maintainer_email='jeremy.hooke@ga.gov.au',
version=version,
description='Automatic retrieval of ancillary and data',
packages=[
'fetch',
],
scripts=[
'bin/fetch-service'
],
requires=[
'arrow',
'croniter',
'feedparser',
'lxml',
'neocommon',
'pyyaml',
'requests',
'setproctitle',
]
)
|
ee4bda5802a601485027a3ea91607dc5077ca73d | setup.py | setup.py | import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.0.1',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
| import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.1.0',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
| Set maximum bounds for both plots to provide a common scale for comparison between the different scenarios. | Set maximum bounds for both plots to provide a common scale for comparison between the different scenarios.
| Python | bsd-2-clause | CI-WATER/tethysapp-canned_gssha,CI-WATER/tethysapp-canned_gssha,CI-WATER/tethysapp-canned_gssha | import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.0.1',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
Set maximum bounds for both plots to provide a common scale for comparison between the different scenarios. | import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.1.0',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
| <commit_before>import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.0.1',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
<commit_msg>Set maximum bounds for both plots to provide a common scale for comparison between the different scenarios.<commit_after> | import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.1.0',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
| import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.0.1',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
Set maximum bounds for both plots to provide a common scale for comparison between the different scenarios.import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.1.0',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
| <commit_before>import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.0.1',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
<commit_msg>Set maximum bounds for both plots to provide a common scale for comparison between the different scenarios.<commit_after>import os
import sys
from setuptools import setup, find_packages
from tethys_apps.app_installation import custom_develop_command, custom_install_command
### Apps Definition ###
app_package = 'canned_gssha'
release_package = 'tethysapp-' + app_package
app_class = 'canned_gssha.app:CannedGSSHA'
app_package_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tethysapp', app_package)
### Python Dependencies ###
dependencies = ['django',]
setup(
name=release_package,
version='0.1.0',
description='Access GSSHA model results that have been put away for a rainy day.',
long_description='',
keywords='',
author='Nathan Swain, Herman Dolder',
author_email='nathan.swain@byu.net',
url='tethys.ci-water.org',
license='BSD 2-Clause',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tethysapp', 'tethysapp.' + app_package],
include_package_data=True,
zip_safe=False,
install_requires=dependencies,
cmdclass={
'install': custom_install_command(app_package, app_package_dir, dependencies),
'develop': custom_develop_command(app_package, app_package_dir, dependencies)
}
)
|
8f86b354b3ceff46363e3121bb1f553a8ff8b301 | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
import os.path
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
## Get long_description from index.txt:
here = os.path.dirname(os.path.abspath(__file__))
f = open(os.path.join(here, 'README.md'))
long_description = f.read().strip()
f.close()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Johan Rydberg',
author_email='johan.rydberg@gmail.com',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
cmdclass=commands)
| #!/usr/bin/env python
from setuptools import setup
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
with open('README.md') as f:
long_description = f.read().strip()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Edgeware',
author_email='info@edgeware.tv',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
test_suite='circuit.test',
tests_require=[
'mockito==0.5.2',
'Twisted>=10.2'
],
cmdclass=commands)
| Add test suit and requirements. | Add test suit and requirements.
| Python | apache-2.0 | edgeware/python-circuit | #!/usr/bin/env python
from distutils.core import setup
import os.path
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
## Get long_description from index.txt:
here = os.path.dirname(os.path.abspath(__file__))
f = open(os.path.join(here, 'README.md'))
long_description = f.read().strip()
f.close()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Johan Rydberg',
author_email='johan.rydberg@gmail.com',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
cmdclass=commands)
Add test suit and requirements. | #!/usr/bin/env python
from setuptools import setup
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
with open('README.md') as f:
long_description = f.read().strip()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Edgeware',
author_email='info@edgeware.tv',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
test_suite='circuit.test',
tests_require=[
'mockito==0.5.2',
'Twisted>=10.2'
],
cmdclass=commands)
| <commit_before>#!/usr/bin/env python
from distutils.core import setup
import os.path
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
## Get long_description from index.txt:
here = os.path.dirname(os.path.abspath(__file__))
f = open(os.path.join(here, 'README.md'))
long_description = f.read().strip()
f.close()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Johan Rydberg',
author_email='johan.rydberg@gmail.com',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
cmdclass=commands)
<commit_msg>Add test suit and requirements.<commit_after> | #!/usr/bin/env python
from setuptools import setup
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
with open('README.md') as f:
long_description = f.read().strip()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Edgeware',
author_email='info@edgeware.tv',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
test_suite='circuit.test',
tests_require=[
'mockito==0.5.2',
'Twisted>=10.2'
],
cmdclass=commands)
| #!/usr/bin/env python
from distutils.core import setup
import os.path
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
## Get long_description from index.txt:
here = os.path.dirname(os.path.abspath(__file__))
f = open(os.path.join(here, 'README.md'))
long_description = f.read().strip()
f.close()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Johan Rydberg',
author_email='johan.rydberg@gmail.com',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
cmdclass=commands)
Add test suit and requirements.#!/usr/bin/env python
from setuptools import setup
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
with open('README.md') as f:
long_description = f.read().strip()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Edgeware',
author_email='info@edgeware.tv',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
test_suite='circuit.test',
tests_require=[
'mockito==0.5.2',
'Twisted>=10.2'
],
cmdclass=commands)
| <commit_before>#!/usr/bin/env python
from distutils.core import setup
import os.path
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
## Get long_description from index.txt:
here = os.path.dirname(os.path.abspath(__file__))
f = open(os.path.join(here, 'README.md'))
long_description = f.read().strip()
f.close()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Johan Rydberg',
author_email='johan.rydberg@gmail.com',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
cmdclass=commands)
<commit_msg>Add test suit and requirements.<commit_after>#!/usr/bin/env python
from setuptools import setup
import versioneer
versioneer.versionfile_source = "circuit/_version.py"
versioneer.versionfile_build = "circuit/_version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = ""
commands = versioneer.get_cmdclass().copy()
with open('README.md') as f:
long_description = f.read().strip()
setup(name='python-circuit',
version=versioneer.get_version(),
description='Simple implementation of the Circuit Breaker pattern',
long_description=long_description,
author='Edgeware',
author_email='info@edgeware.tv',
url='https://github.com/edgeware/python-circuit',
packages=['circuit'],
test_suite='circuit.test',
tests_require=[
'mockito==0.5.2',
'Twisted>=10.2'
],
cmdclass=commands)
|
1d0e6420e0e37921381c30b0247c0f5f27c72a1f | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.7.1",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
| from setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.8.0",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
| Increment version number 0.8.0 Set field support | Increment version number 0.8.0 Set field support
| Python | mit | mathewmarcus/marshmallow-pynamodb | from setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.7.1",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
Increment version number 0.8.0 Set field support | from setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.8.0",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
| <commit_before>from setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.7.1",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
<commit_msg>Increment version number 0.8.0 Set field support<commit_after> | from setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.8.0",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
| from setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.7.1",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
Increment version number 0.8.0 Set field supportfrom setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.8.0",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
| <commit_before>from setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.7.1",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
<commit_msg>Increment version number 0.8.0 Set field support<commit_after>from setuptools import setup, find_packages
setup(
name="marshmallow-pynamodb",
version="0.8.0",
packages=find_packages(exclude=('*test*',)),
package_dir={'marshmallow-pynamodb': 'marshmallow_pynamodb'},
description='PynamoDB integration with the marshmallow (de)serialization library',
author='Mathew Marcus',
author_email='mathewmarcus456@gmail.com',
long_description=open('README.rst').read(),
install_requires=[
"marshmallow>=2.12.2",
"pynamodb>=2.0.3",
]
)
|
c529b8d4979f5fae6984d2bcd6d2aa40d181e097 | setup.py | setup.py | import re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False) | import re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g', 'schemas/*.json']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False) | Add schemas folder to package | Add schemas folder to package
| Python | mit | geographika/mappyfile,geographika/mappyfile | import re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False)Add schemas folder to package | import re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g', 'schemas/*.json']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False) | <commit_before>import re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False)<commit_msg>Add schemas folder to package<commit_after> | import re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g', 'schemas/*.json']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False) | import re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False)Add schemas folder to packageimport re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g', 'schemas/*.json']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False) | <commit_before>import re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False)<commit_msg>Add schemas folder to package<commit_after>import re
from setuptools import setup
__version__,= re.findall('__version__ = "(.*)"', open('mappyfile/__init__.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='mappyfile',
version=__version__,
description='A pure Python MapFile parser for working with MapServer',
long_description=readme(),
classifiers=[
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Developers',
'Topic :: Text Processing :: Linguistic',
'Topic :: Software Development :: Build Tools'
],
package_data = {
'': ['*.g', 'schemas/*.json']
},
url='http://github.com/geographika/mappyfile',
author='Seth Girvin',
author_email='sethg@geographika.co.uk',
license='MIT',
packages=['mappyfile'],
install_requires=['lark-parser','jsonschema'],
zip_safe=False) |
766e33416df08b6cdcdb236a335afbc6bd7acc06 | setup.py | setup.py | from setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
]
)
| from setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'
]
)
| Add explicit Python 3.6 support | Add explicit Python 3.6 support
| Python | mit | kefir500/ghstats | from setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
]
)
Add explicit Python 3.6 support | from setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'
]
)
| <commit_before>from setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
]
)
<commit_msg>Add explicit Python 3.6 support<commit_after> | from setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'
]
)
| from setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
]
)
Add explicit Python 3.6 supportfrom setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'
]
)
| <commit_before>from setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities'
]
)
<commit_msg>Add explicit Python 3.6 support<commit_after>from setuptools import setup
from os import path
with open(path.join(path.abspath(path.dirname(__file__)), 'README.rst')) as f:
readme = f.read()
setup(
name='ghstats',
version='1.1.1',
packages=['ghstats'],
description='GitHub Release download count and other statistics.',
long_description=readme,
author='Alexander Gorishnyak',
author_email='kefir500@gmail.com',
license='MIT',
url='https://github.com/kefir500/ghstats',
keywords='github release download count stats statistics',
entry_points={
'console_scripts': [
'ghstats = ghstats.ghstats:main_cli'
]
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Utilities'
]
)
|
e05b6484938f65338882a86c9ce2d71df6e5272b | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=['regparser', ],
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=find_packages(),
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
| Use find_packages() to ensure the whole regparser gets installed | Use find_packages() to ensure the whole regparser gets installed
| Python | cc0-1.0 | grapesmoker/regulations-parser | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=['regparser', ],
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
Use find_packages() to ensure the whole regparser gets installed | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=find_packages(),
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=['regparser', ],
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
<commit_msg>Use find_packages() to ensure the whole regparser gets installed<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=find_packages(),
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=['regparser', ],
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
Use find_packages() to ensure the whole regparser gets installed#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=find_packages(),
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from distutils.core import setup
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=['regparser', ],
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
<commit_msg>Use find_packages() to ensure the whole regparser gets installed<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
setup(
name='regulations-parser',
url='https://github.com/cfpb/regulations-parser',
author='CFPB',
author_email='tech@cfpb.gov',
license='CC0',
version='0.1.0',
description='eCFR Parser for eRegulations',
long_description=open('README.md').read()
if os.path.exists('README.md') else '',
packages=find_packages(),
include_package_data=True,
install_requires=[
'lxml',
'pyparsing',
'inflection',
'requests',
'GitPython',
'python-constraint',
],
setup_requires=[
'nose>=1.0'
],
test_suite='xtdiff.tests',
)
|
3766a8638094fc7bbf8bfb529312a0741049376b | spyder_memory_profiler/__init__.py | spyder_memory_profiler/__init__.py | # -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
| # -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
| Change version number to 0.2.0 | Change version number to 0.2.0
| Python | mit | jitseniesen/spyder-memory-profiler,spyder-ide/spyder.memory_profiler,jitseniesen/spyder-memory-profiler | # -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
Change version number to 0.2.0 | # -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
<commit_msg>Change version number to 0.2.0<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
| # -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
Change version number to 0.2.0# -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
<commit_msg>Change version number to 0.2.0<commit_after># -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
|
137d3c0394309dfb22a407eda5b80bc312482c1d | setup.py | setup.py | from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
| from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
license="MIT",
url="https://zcfd.zenotech.com/",
project_urls={
"Source Code": "https://github.com/zCFD/zutil/",
},
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
| Add license and Source Code url | Add license and Source Code url
| Python | mit | zCFD/zutil | from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
Add license and Source Code url | from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
license="MIT",
url="https://zcfd.zenotech.com/",
project_urls={
"Source Code": "https://github.com/zCFD/zutil/",
},
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
| <commit_before>from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
<commit_msg>Add license and Source Code url<commit_after> | from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
license="MIT",
url="https://zcfd.zenotech.com/",
project_urls={
"Source Code": "https://github.com/zCFD/zutil/",
},
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
| from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
Add license and Source Code urlfrom distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
license="MIT",
url="https://zcfd.zenotech.com/",
project_urls={
"Source Code": "https://github.com/zCFD/zutil/",
},
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
| <commit_before>from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
url="https://zcfd.zenotech.com/",
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
<commit_msg>Add license and Source Code url<commit_after>from distutils.core import setup
setup(name="zutil",
version='0.1.5',
description="Utilities used for generating zCFD control dictionaries",
author="Zenotech",
author_email="support@zenotech.com",
license="MIT",
url="https://zcfd.zenotech.com/",
project_urls={
"Source Code": "https://github.com/zCFD/zutil/",
},
packages=["zutil", "zutil.post", "zutil.analysis", "zutil.plot"],
install_requires=[
'ipython<6.0',
'Fabric',
'ipywidgets',
'matplotlib',
'numpy',
'pandas',
'PyYAML'
],
extras_require={
"mpi": ["mpi4py"]
}
)
|
0e801cf96a7dee047f935b32c931eabe135035ea | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
| #!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: ISC License (ISCL)",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
| Update classifiers to show ISC license | Update classifiers to show ISC license
| Python | isc | garg10may/yturl | #!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
Update classifiers to show ISC license | #!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: ISC License (ISCL)",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
| <commit_before>#!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
<commit_msg>Update classifiers to show ISC license<commit_after> | #!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: ISC License (ISCL)",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
| #!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
Update classifiers to show ISC license#!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: ISC License (ISCL)",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
| <commit_before>#!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
<commit_msg>Update classifiers to show ISC license<commit_after>#!/usr/bin/env python
from setuptools import setup
with open('README.rst') as readme_f:
README = readme_f.read()
with open('tests/requirements.txt') as test_requirements_f:
TEST_REQUIREMENTS = test_requirements_f.readlines()
setup(
name="yturl",
version="1.18.0",
description="Gets direct media URLs to YouTube media",
long_description=README,
url="https://github.com/cdown/yturl",
license='ISC',
author="Chris Down",
author_email="chris@chrisdown.name",
py_modules=["yturl"],
entry_points={
'console_scripts': ['yturl=yturl:_main'],
},
keywords='youtube media video',
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: ISC License (ISCL)",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Multimedia",
"Topic :: Internet",
"Topic :: Utilities",
],
test_suite='nose.collector',
tests_require=TEST_REQUIREMENTS
)
|
9af9144f9026e84ce04f9cdd5ce738b015247c12 | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = file('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
| #!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = open('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
| Fix compatible bug: file() -> open(). | Fix compatible bug: file() -> open().
| Python | mit | PytLab/catplot | #!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = file('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
Fix compatible bug: file() -> open(). | #!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = open('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
| <commit_before>#!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = file('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
<commit_msg>Fix compatible bug: file() -> open().<commit_after> | #!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = open('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
| #!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = file('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
Fix compatible bug: file() -> open().#!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = open('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
| <commit_before>#!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = file('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
<commit_msg>Fix compatible bug: file() -> open().<commit_after>#!/usr/bin/env python
from distutils.core import setup
from catplot import __version__ as version
maintainer = 'Shao-Zheng-Jiang'
maintainer_email = 'shaozhengjiang@gmail.com'
author = maintainer
author_email = maintainer_email
description = __doc__
requires = [
'numpy',
'scipy',
'matplotlib',
]
license = 'LICENSE'
long_description = open('README.md').read()
name = 'python-catplot'
packages = [
'catplot',
]
platforms = ['linux', 'windows']
url = 'https://github.com/PytLab/catplot'
download_url = 'https://github.com/PytLab/catplot/releases'
setup(
author=author,
author_email=author_email,
description=description,
license=license,
long_description=long_description,
maintainer=maintainer,
name=name,
packages=packages,
platforms=platforms,
url=url,
download_url=download_url,
version=version,
)
|
d978f9c54d3509a5fd8ef3b287d2c3dfa7683d77 | setup.py | setup.py | #!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.0.2",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
| #!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.1b5",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
| Upgrade to a newer gevent for OSX Yosemity compat | Upgrade to a newer gevent for OSX Yosemity compat
See https://github.com/gevent/gevent/issues/656
| Python | mit | ErinCall/catsnap,ErinCall/catsnap,ErinCall/catsnap | #!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.0.2",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
Upgrade to a newer gevent for OSX Yosemity compat
See https://github.com/gevent/gevent/issues/656 | #!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.1b5",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
| <commit_before>#!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.0.2",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
<commit_msg>Upgrade to a newer gevent for OSX Yosemity compat
See https://github.com/gevent/gevent/issues/656<commit_after> | #!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.1b5",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
| #!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.0.2",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
Upgrade to a newer gevent for OSX Yosemity compat
See https://github.com/gevent/gevent/issues/656#!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.1b5",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
| <commit_before>#!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.0.2",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
<commit_msg>Upgrade to a newer gevent for OSX Yosemity compat
See https://github.com/gevent/gevent/issues/656<commit_after>#!/usr/bin/python
from setuptools import setup
setup(name="catsnap",
version="6.0.0",
description="catalog and store images",
author="Erin Call",
author_email="hello@erincall.com",
url="https://github.com/ErinCall/",
packages=['catsnap',
'catsnap.document',
'catsnap.config',
'catsnap.batch'],
install_requires=[
"Flask==0.9",
"gunicorn==0.14.6",
"boto==2.5.2",
"requests==0.13.2",
"argparse==1.2.1",
"psycopg2==2.4.6",
"sqlalchemy==0.8.0b2",
"yoyo-migrations==4.1.6",
"wand==0.3.3",
"celery==3.1.16",
"redis==2.10.3",
"gevent==1.1b5",
"Flask-Sockets==0.1",
"PyYAML==3.11",
"mock==1.0.1",
"nose==1.1.2",
"splinter==0.5.3",
"bcrypt==1.1.1",
],
)
|
976167045131263dc52ff57315f08783a318a9df | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
setup(use_scm_version=True)
| #!/usr/bin/env python
from setuptools import setup
setup(name='django-s3file', use_scm_version=True)
| Add package name for github usage report | Add package name for github usage report | Python | mit | codingjoe/django-s3file,codingjoe/django-s3file,codingjoe/django-s3file | #!/usr/bin/env python
from setuptools import setup
setup(use_scm_version=True)
Add package name for github usage report | #!/usr/bin/env python
from setuptools import setup
setup(name='django-s3file', use_scm_version=True)
| <commit_before>#!/usr/bin/env python
from setuptools import setup
setup(use_scm_version=True)
<commit_msg>Add package name for github usage report<commit_after> | #!/usr/bin/env python
from setuptools import setup
setup(name='django-s3file', use_scm_version=True)
| #!/usr/bin/env python
from setuptools import setup
setup(use_scm_version=True)
Add package name for github usage report#!/usr/bin/env python
from setuptools import setup
setup(name='django-s3file', use_scm_version=True)
| <commit_before>#!/usr/bin/env python
from setuptools import setup
setup(use_scm_version=True)
<commit_msg>Add package name for github usage report<commit_after>#!/usr/bin/env python
from setuptools import setup
setup(name='django-s3file', use_scm_version=True)
|
83f62bd5993ba253183f120567a2a42108c4b7b4 | setup.py | setup.py | from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=['mahjong'],
version='1.0.1',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
Right now it supports only japanese version (riichi mahjong). MCR (chinese version) in plans
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=[
'mahjong',
'mahjong.hand_calculating',
'mahjong.hand_calculating.yaku_list',
'mahjong.hand_calculating.yaku_list.yakuman',
],
version='1.0.2',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| Add missed packages tot he build script | Add missed packages tot he build script
| Python | mit | MahjongRepository/mahjong | from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=['mahjong'],
version='1.0.1',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
Add missed packages tot he build script | from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
Right now it supports only japanese version (riichi mahjong). MCR (chinese version) in plans
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=[
'mahjong',
'mahjong.hand_calculating',
'mahjong.hand_calculating.yaku_list',
'mahjong.hand_calculating.yaku_list.yakuman',
],
version='1.0.2',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| <commit_before>from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=['mahjong'],
version='1.0.1',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
<commit_msg>Add missed packages tot he build script<commit_after> | from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
Right now it supports only japanese version (riichi mahjong). MCR (chinese version) in plans
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=[
'mahjong',
'mahjong.hand_calculating',
'mahjong.hand_calculating.yaku_list',
'mahjong.hand_calculating.yaku_list.yakuman',
],
version='1.0.2',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=['mahjong'],
version='1.0.1',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
Add missed packages tot he build scriptfrom distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
Right now it supports only japanese version (riichi mahjong). MCR (chinese version) in plans
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=[
'mahjong',
'mahjong.hand_calculating',
'mahjong.hand_calculating.yaku_list',
'mahjong.hand_calculating.yaku_list.yakuman',
],
version='1.0.2',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
| <commit_before>from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=['mahjong'],
version='1.0.1',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
<commit_msg>Add missed packages tot he build script<commit_after>from distutils.core import setup
description = """
A python module for calculating riichi mahjong hands: yaku, han and fu.
Right now it supports only japanese version (riichi mahjong). MCR (chinese version) in plans
You can find usage examples here https://github.com/MahjongRepository/mahjong
"""
setup(
name='mahjong',
packages=[
'mahjong',
'mahjong.hand_calculating',
'mahjong.hand_calculating.yaku_list',
'mahjong.hand_calculating.yaku_list.yakuman',
],
version='1.0.2',
description='Mahjong hands calculation',
long_description=description,
author='Alexey Lisikhin',
author_email='lisikhin@gmail.com',
url='https://github.com/MahjongRepository/mahjong',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
|
70189c54cfbe07b819fcd23fbe213be9de5b4db2 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.4',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False
)
| from setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.5',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False,
install_requires=['requests']
)
| Add required packages for lib to work and update version | Add required packages for lib to work and update version
| Python | mit | andreffs18/sheetsu-python | from setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.4',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False
)
Add required packages for lib to work and update version | from setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.5',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False,
install_requires=['requests']
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.4',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False
)
<commit_msg>Add required packages for lib to work and update version<commit_after> | from setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.5',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False,
install_requires=['requests']
)
| from setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.4',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False
)
Add required packages for lib to work and update versionfrom setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.5',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False,
install_requires=['requests']
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.4',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False
)
<commit_msg>Add required packages for lib to work and update version<commit_after>from setuptools import setup, find_packages
setup(
name='sheetsu',
version='0.0.5',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
author_email='andreffs18@gmail.com',
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
zip_safe=False,
install_requires=['requests']
)
|
4bf26b6d976171b5a388134ad9716af639f15a3b | setup.py | setup.py | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
| import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
'google-api-python-client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
| Add the Google API client as a requirement | Add the Google API client as a requirement
| Python | isc | GuardedRisk/Google-Apps-Auditing | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
Add the Google API client as a requirement | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
'google-api-python-client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
| <commit_before>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
<commit_msg>Add the Google API client as a requirement<commit_after> | import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
'google-api-python-client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
| import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
Add the Google API client as a requirementimport os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
'google-api-python-client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
| <commit_before>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
<commit_msg>Add the Google API client as a requirement<commit_after>import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()
requires = [
'oauth2client',
'google-api-python-client',
]
tests_require = []
testing_requires = tests_require + [
'nose',
'coverage',
]
develop_requires = [
]
setup(name='acctwatch',
version='0.0',
description='acctwatch',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
],
author='Bert JW Regeer',
author_email='bertjw@regeer.org',
url='',
keywords='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='acctwatch.tests',
install_requires=requires,
tests_require=tests_require,
extras_require = {
'develop': develop_requires,
'testing': testing_requires,
},
entry_points="""\
[console_scripts]
acctwatch = acctwatch.acctwatch:main
""",
)
|
05bfc141b279dc8f30089e8b72502f9042a2ff3b | setup.py | setup.py | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
setup(
name='genestack',
version='0.1',
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
| #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
exec(open('genestack/version.py').read())
setup(
name='genestack',
version=__version__,
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
| Use move version for sutup.py | Use move version for sutup.py
| Python | mit | genestack/python-client | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
setup(
name='genestack',
version='0.1',
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
Use move version for sutup.py | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
exec(open('genestack/version.py').read())
setup(
name='genestack',
version=__version__,
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
| <commit_before>#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
setup(
name='genestack',
version='0.1',
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
<commit_msg>Use move version for sutup.py<commit_after> | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
exec(open('genestack/version.py').read())
setup(
name='genestack',
version=__version__,
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
| #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
setup(
name='genestack',
version='0.1',
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
Use move version for sutup.py#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
exec(open('genestack/version.py').read())
setup(
name='genestack',
version=__version__,
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
| <commit_before>#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
setup(
name='genestack',
version='0.1',
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
<commit_msg>Use move version for sutup.py<commit_after>#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Genestack Limited
# All Rights Reserved
# THIS IS UNPUBLISHED PROPRIETARY SOURCE CODE OF GENESTACK LIMITED
# The copyright notice above does not evidence any
# actual or intended publication of such source code.
#
from distutils.core import setup
exec(open('genestack/version.py').read())
setup(
name='genestack',
version=__version__,
packages=['genestack', 'genestack.settings'],
url='',
license='',
author='Genestack Limited',
author_email='',
description='Genestack API',
scripts=['genestack-user-setup.py', 'genestack-application-manager.py'],
install_requires=['keyring']
)
|
cc92b1770acdc5a34eb32c596c0b2ece6bf32b0f | qiprofile_rest/server/settings.py | qiprofile_rest/server/settings.py | # This file specifies the Eve configuration.
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
| """This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
| Allow MONGO_HOST env var override. | Allow MONGO_HOST env var override.
| Python | bsd-2-clause | ohsu-qin/qirest,ohsu-qin/qiprofile-rest | # This file specifies the Eve configuration.
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
Allow MONGO_HOST env var override. | """This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
| <commit_before># This file specifies the Eve configuration.
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
<commit_msg>Allow MONGO_HOST env var override.<commit_after> | """This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
| # This file specifies the Eve configuration.
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
Allow MONGO_HOST env var override."""This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
| <commit_before># This file specifies the Eve configuration.
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
<commit_msg>Allow MONGO_HOST env var override.<commit_after>"""This ``settings`` file specifies the Eve configuration."""
import os
# The run environment default is production.
# Modify this by setting the NODE_ENV environment variable.
env = os.getenv('NODE_ENV') or 'production'
# The MongoDB database.
if env == 'production':
MONGO_DBNAME = 'qiprofile'
else:
MONGO_DBNAME = 'qiprofile_test'
# The MongoDB host default is localhost, but can be reset
# by the MONGO_HOST environment variable.
host = os.getenv('MONGO_HOST')
if host:
MONGO_HOST = host
# Even though the domain is defined by the Eve MongoEngine
# adapter, a DOMAIN setting is required by Eve. This setting
# is only used to avoid an Eve complaint about a missing domain.
DOMAIN = {'eve-mongoengine': {}}
|
8cab1d360218f6d8075bad08fd38ef90c75e5549 | turbustat/tests/setup_package.py | turbustat/tests/setup_package.py |
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz']
}
|
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz',
'coveragerc']
}
| Add coveragerc to package data | Add coveragerc to package data
| Python | mit | e-koch/TurbuStat,Astroua/TurbuStat |
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz']
}
Add coveragerc to package data |
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz',
'coveragerc']
}
| <commit_before>
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz']
}
<commit_msg>Add coveragerc to package data<commit_after> |
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz',
'coveragerc']
}
|
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz']
}
Add coveragerc to package data
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz',
'coveragerc']
}
| <commit_before>
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz']
}
<commit_msg>Add coveragerc to package data<commit_after>
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz',
'coveragerc']
}
|
9371b962e43d6876ff8f902283d3fb1963c076d3 | pytest-{{cookiecutter.plugin_name}}/pytest_{{cookiecutter.plugin_name}}.py | pytest-{{cookiecutter.plugin_name}}/pytest_{{cookiecutter.plugin_name}}.py | # -*- coding: utf-8 -*-
import pytest
def pytest_addoption(parser):
group = parser.getgroup('{{cookiecutter.plugin_name}}')
group.addoption(
'--foo',
action='store_const',
dest='foo',
help='alias for --foo'
)
| Implement a very basic plugin to add an option | Implement a very basic plugin to add an option
| Python | mit | luzfcb/cookiecutter-pytest-plugin,s0undt3ch/cookiecutter-pytest-plugin,pytest-dev/cookiecutter-pytest-plugin | Implement a very basic plugin to add an option | # -*- coding: utf-8 -*-
import pytest
def pytest_addoption(parser):
group = parser.getgroup('{{cookiecutter.plugin_name}}')
group.addoption(
'--foo',
action='store_const',
dest='foo',
help='alias for --foo'
)
| <commit_before><commit_msg>Implement a very basic plugin to add an option<commit_after> | # -*- coding: utf-8 -*-
import pytest
def pytest_addoption(parser):
group = parser.getgroup('{{cookiecutter.plugin_name}}')
group.addoption(
'--foo',
action='store_const',
dest='foo',
help='alias for --foo'
)
| Implement a very basic plugin to add an option# -*- coding: utf-8 -*-
import pytest
def pytest_addoption(parser):
group = parser.getgroup('{{cookiecutter.plugin_name}}')
group.addoption(
'--foo',
action='store_const',
dest='foo',
help='alias for --foo'
)
| <commit_before><commit_msg>Implement a very basic plugin to add an option<commit_after># -*- coding: utf-8 -*-
import pytest
def pytest_addoption(parser):
group = parser.getgroup('{{cookiecutter.plugin_name}}')
group.addoption(
'--foo',
action='store_const',
dest='foo',
help='alias for --foo'
)
| |
6d1626327f3577a86cdd3c54e5732b65e59a3402 | test2.py | test2.py | import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
print item
print "====="
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append(item)
print item
print "==="
else: pass
print computers
| #notes: will do it using oop.
import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append((item['title'],item['options']))
print item
else: pass
for index, item in enumerate(item):
# Do the second step
pass
print computers
| Add an additional layer for the for loop | Add an additional layer for the for loop
| Python | mit | zhang96/JSONWithPython | import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
print item
print "====="
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append(item)
print item
print "==="
else: pass
print computers
Add an additional layer for the for loop | #notes: will do it using oop.
import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append((item['title'],item['options']))
print item
else: pass
for index, item in enumerate(item):
# Do the second step
pass
print computers
| <commit_before>import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
print item
print "====="
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append(item)
print item
print "==="
else: pass
print computers
<commit_msg>Add an additional layer for the for loop<commit_after> | #notes: will do it using oop.
import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append((item['title'],item['options']))
print item
else: pass
for index, item in enumerate(item):
# Do the second step
pass
print computers
| import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
print item
print "====="
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append(item)
print item
print "==="
else: pass
print computers
Add an additional layer for the for loop#notes: will do it using oop.
import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append((item['title'],item['options']))
print item
else: pass
for index, item in enumerate(item):
# Do the second step
pass
print computers
| <commit_before>import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
print item
print "====="
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append(item)
print item
print "==="
else: pass
print computers
<commit_msg>Add an additional layer for the for loop<commit_after>#notes: will do it using oop.
import json
import itertools
with open('products.json') as data_file:
data = json.load(data_file)
products = data['products']
products_temp = []
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = [] # delete the variable
for index, item in enumerate(products):
products_temp.append(item)
products = products_temp
products_temp = None
computers,keyboards = [],[]
for index, item in enumerate(products):
if item ['product_type'] == 'Computer':
computers.append((item['title'],item['options']))
else: pass
if item ['product_type'] == 'Keyboard':
keyboards.append((item['title'],item['options']))
print item
else: pass
for index, item in enumerate(item):
# Do the second step
pass
print computers
|
8c5007bd5a1f898ca0987e7b79b8dd8f0a2642c5 | pfamserver/api.py | pfamserver/api.py | from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
class QueryAPI(Resource):
def get(self, query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
output = run(cmd, stdout=PIPE).communicate()[0]
return {'query': query,
'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
| from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
def db(query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
queries = [query, query.capitalize(), query.upper(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
| Check variations little variations if the query fails. | Check variations little variations if the query fails.
| Python | agpl-3.0 | ecolell/pfamserver,ecolell/pfamserver,ecolell/pfamserver | from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
class QueryAPI(Resource):
def get(self, query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
output = run(cmd, stdout=PIPE).communicate()[0]
return {'query': query,
'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
Check variations little variations if the query fails. | from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
def db(query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
queries = [query, query.capitalize(), query.upper(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
| <commit_before>from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
class QueryAPI(Resource):
def get(self, query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
output = run(cmd, stdout=PIPE).communicate()[0]
return {'query': query,
'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
<commit_msg>Check variations little variations if the query fails.<commit_after> | from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
def db(query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
queries = [query, query.capitalize(), query.upper(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
| from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
class QueryAPI(Resource):
def get(self, query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
output = run(cmd, stdout=PIPE).communicate()[0]
return {'query': query,
'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
Check variations little variations if the query fails.from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
def db(query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
queries = [query, query.capitalize(), query.upper(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
| <commit_before>from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
class QueryAPI(Resource):
def get(self, query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
output = run(cmd, stdout=PIPE).communicate()[0]
return {'query': query,
'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
<commit_msg>Check variations little variations if the query fails.<commit_after>from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
api = Api(app)
def db(query):
cmd = ['./hmmer/binaries/esl-afetch', 'Pfam-A.full', query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
queries = [query, query.capitalize(), query.upper(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
|
f019cb2f0e3604b264aeb55a3a01641f998d27d7 | test/fuzz/gen-dict.py | test/fuzz/gen-dict.py | import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join([(c if c.isalnum() else '\\x%02x' % ord(c)) for c in lit])
if __name__ == '__main__':
main()
| import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join(['\\x%02x' % ord(b) for b in lit.encode('utf-8')])
if __name__ == '__main__':
main()
| Handle non-ascii characters when generating fuzzing dictionary | Handle non-ascii characters when generating fuzzing dictionary
This caused a failure when generating the dictionary for `tree-sitter-agda`.
| Python | mit | tree-sitter/tree-sitter,tree-sitter/tree-sitter,tree-sitter/tree-sitter,tree-sitter/tree-sitter,tree-sitter/tree-sitter,tree-sitter/tree-sitter,tree-sitter/tree-sitter | import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join([(c if c.isalnum() else '\\x%02x' % ord(c)) for c in lit])
if __name__ == '__main__':
main()
Handle non-ascii characters when generating fuzzing dictionary
This caused a failure when generating the dictionary for `tree-sitter-agda`. | import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join(['\\x%02x' % ord(b) for b in lit.encode('utf-8')])
if __name__ == '__main__':
main()
| <commit_before>import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join([(c if c.isalnum() else '\\x%02x' % ord(c)) for c in lit])
if __name__ == '__main__':
main()
<commit_msg>Handle non-ascii characters when generating fuzzing dictionary
This caused a failure when generating the dictionary for `tree-sitter-agda`.<commit_after> | import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join(['\\x%02x' % ord(b) for b in lit.encode('utf-8')])
if __name__ == '__main__':
main()
| import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join([(c if c.isalnum() else '\\x%02x' % ord(c)) for c in lit])
if __name__ == '__main__':
main()
Handle non-ascii characters when generating fuzzing dictionary
This caused a failure when generating the dictionary for `tree-sitter-agda`.import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join(['\\x%02x' % ord(b) for b in lit.encode('utf-8')])
if __name__ == '__main__':
main()
| <commit_before>import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join([(c if c.isalnum() else '\\x%02x' % ord(c)) for c in lit])
if __name__ == '__main__':
main()
<commit_msg>Handle non-ascii characters when generating fuzzing dictionary
This caused a failure when generating the dictionary for `tree-sitter-agda`.<commit_after>import json
import sys
def find_literals(literals, node):
'''Recursively find STRING literals in the grammar definition'''
if type(node) is dict:
if 'type' in node and node['type'] == 'STRING' and 'value' in node:
literals.add(node['value'])
for key, value in node.iteritems():
find_literals(literals, value)
elif type(node) is list:
for item in node:
find_literals(literals, item)
def main():
'''Generate a libFuzzer / AFL dictionary from a tree-sitter grammar.json'''
with open(sys.argv[1]) as f:
grammar = json.load(f)
literals = set()
find_literals(literals, grammar)
for lit in sorted(literals):
if lit:
print '"%s"' % ''.join(['\\x%02x' % ord(b) for b in lit.encode('utf-8')])
if __name__ == '__main__':
main()
|
6dd5a006892b1ba51c7f4f338693bf780293b897 | dedupe/_typing.py | dedupe/_typing.py | import numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class _TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class TrainingData(_TrainingData, total=False):
uncertain: List[TrainingExample] # optional key
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
| import numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
| Remove use of "unsure" from TrainingData type | Remove use of "unsure" from TrainingData type
The "unsure" key isn't used anywhere else
| Python | mit | dedupeio/dedupe,dedupeio/dedupe | import numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class _TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class TrainingData(_TrainingData, total=False):
uncertain: List[TrainingExample] # optional key
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
Remove use of "unsure" from TrainingData type
The "unsure" key isn't used anywhere else | import numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
| <commit_before>import numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class _TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class TrainingData(_TrainingData, total=False):
uncertain: List[TrainingExample] # optional key
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
<commit_msg>Remove use of "unsure" from TrainingData type
The "unsure" key isn't used anywhere else<commit_after> | import numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
| import numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class _TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class TrainingData(_TrainingData, total=False):
uncertain: List[TrainingExample] # optional key
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
Remove use of "unsure" from TrainingData type
The "unsure" key isn't used anywhere elseimport numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
| <commit_before>import numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class _TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class TrainingData(_TrainingData, total=False):
uncertain: List[TrainingExample] # optional key
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
<commit_msg>Remove use of "unsure" from TrainingData type
The "unsure" key isn't used anywhere else<commit_after>import numpy
import sys
from typing import (Iterator,
Tuple,
Mapping,
Union,
Iterable,
List,
Any)
if sys.version_info >= (3, 8):
from typing import TypedDict, Protocol, Literal
else:
from typing_extensions import TypedDict, Protocol, Literal
RecordDict = Mapping[str, Any]
RecordID = Union[int, str]
Record = Tuple[RecordID, RecordDict]
RecordPair = Tuple[Record, Record]
RecordPairs = Iterator[RecordPair]
Blocks = Iterator[List[RecordPair]]
Cluster = Tuple[Tuple[RecordID, ...], Union[numpy.ndarray, Tuple]]
Clusters = Iterable[Cluster]
Data = Mapping[RecordID, RecordDict]
TrainingExample = Tuple[RecordDict, RecordDict]
Links = Iterable[Union[numpy.ndarray,
Tuple[Tuple[RecordID, RecordID], float]]]
LookupResults = Iterable[Tuple[RecordID, Tuple[Tuple[RecordID, float], ...]]]
JoinConstraint = Literal['one-to-one', 'many-to-one', 'many-to-many']
class TrainingData(TypedDict):
match: List[TrainingExample]
distinct: List[TrainingExample]
class Classifier(Protocol):
def fit(self, X: object, y: object) -> None:
...
def predict_proba(self, X: object) -> Any:
...
|
476338ba2edce4ff78f9451ae9cca6a2c91f787b | opps/core/admin/article.py | opps/core/admin/article.py | # -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
| # -*- coding: utf-8 -*-
from django.contrib.sites.models import Site
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)
}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
exclude = ('user',)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
| Fix field set on post admin, opps core | Fix field set on post admin, opps core
| Python | mit | YACOWS/opps,jeanmask/opps,opps/opps,jeanmask/opps,opps/opps,opps/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps | # -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
Fix field set on post admin, opps core | # -*- coding: utf-8 -*-
from django.contrib.sites.models import Site
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)
}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
exclude = ('user',)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
| <commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
<commit_msg>Fix field set on post admin, opps core<commit_after> | # -*- coding: utf-8 -*-
from django.contrib.sites.models import Site
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)
}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
exclude = ('user',)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
Fix field set on post admin, opps core# -*- coding: utf-8 -*-
from django.contrib.sites.models import Site
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)
}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
exclude = ('user',)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
| <commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
<commit_msg>Fix field set on post admin, opps core<commit_after># -*- coding: utf-8 -*-
from django.contrib.sites.models import Site
from django.contrib import admin
from django import forms
from opps.core.models import Post, PostImage
from opps.core.models import Image
from redactor.widgets import RedactorEditor
class PostImageInline(admin.TabularInline):
model = PostImage
fk_name = 'post'
raw_id_fields = ['image']
actions = None
extra = 1
fieldsets = [(None, {'fields': ('image', 'order')})]
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
inlines = [PostImageInline]
fieldsets = (
(None, {'fields': ('title', 'short_title', 'headline', 'channel',
'content',)
}),
(None, {'fields': ('main_image', 'credit', 'slug',)})
)
exclude = ('user',)
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
c9ed6fe84b7f55ba2e9dc75d9ddf8cb0e7f9eb8c | pixelmap/pixel.py | pixelmap/pixel.py | """Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 7, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self):
"""Pixel constructor"""
self.id = next(self.new_id)
def __str__(self):
return str(self.id)
def __repr__(self):
return self.__str__()
| """Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 11, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self, data=None):
"""Pixel constructor"""
self.id = next(self.new_id)
self.data = data
def __str__(self):
return str(self.data)
| Add data dict as Pixel member. | Add data dict as Pixel member.
| Python | mit | yebra06/pixelmap | """Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 7, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self):
"""Pixel constructor"""
self.id = next(self.new_id)
def __str__(self):
return str(self.id)
def __repr__(self):
return self.__str__()
Add data dict as Pixel member. | """Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 11, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self, data=None):
"""Pixel constructor"""
self.id = next(self.new_id)
self.data = data
def __str__(self):
return str(self.data)
| <commit_before>"""Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 7, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self):
"""Pixel constructor"""
self.id = next(self.new_id)
def __str__(self):
return str(self.id)
def __repr__(self):
return self.__str__()
<commit_msg>Add data dict as Pixel member.<commit_after> | """Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 11, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self, data=None):
"""Pixel constructor"""
self.id = next(self.new_id)
self.data = data
def __str__(self):
return str(self.data)
| """Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 7, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self):
"""Pixel constructor"""
self.id = next(self.new_id)
def __str__(self):
return str(self.id)
def __repr__(self):
return self.__str__()
Add data dict as Pixel member."""Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 11, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self, data=None):
"""Pixel constructor"""
self.id = next(self.new_id)
self.data = data
def __str__(self):
return str(self.data)
| <commit_before>"""Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 7, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self):
"""Pixel constructor"""
self.id = next(self.new_id)
def __str__(self):
return str(self.id)
def __repr__(self):
return self.__str__()
<commit_msg>Add data dict as Pixel member.<commit_after>"""Pixel
A pixel data structure with it's own uid that makes a Pixelmap.
Last updated: March 11, 2017
"""
from itertools import count
class Pixel:
new_id = count(1)
def __init__(self, data=None):
"""Pixel constructor"""
self.id = next(self.new_id)
self.data = data
def __str__(self):
return str(self.data)
|
8a7b6be29b3a839ba8e5c2cb33322d90d51d5fc4 | karbor/tests/unit/conf_fixture.py | karbor/tests/unit/conf_fixture.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
CONF.import_opt('provider_config_dir', 'karbor.services.protection.provider')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
| Fix loading 'provider_config_dir' opt error | Fix loading 'provider_config_dir' opt error
When run unit test using 'ostestr --pdb' command. it may get
an error that can not find the config opt 'provider_config_dir'.
Change-Id: Ibc1c693a1531c791ad434ff56ee349ba3afb3d63
Closes-Bug: #1649443
| Python | apache-2.0 | openstack/smaug,openstack/smaug | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
Fix loading 'provider_config_dir' opt error
When run unit test using 'ostestr --pdb' command. it may get
an error that can not find the config opt 'provider_config_dir'.
Change-Id: Ibc1c693a1531c791ad434ff56ee349ba3afb3d63
Closes-Bug: #1649443 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
CONF.import_opt('provider_config_dir', 'karbor.services.protection.provider')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
<commit_msg>Fix loading 'provider_config_dir' opt error
When run unit test using 'ostestr --pdb' command. it may get
an error that can not find the config opt 'provider_config_dir'.
Change-Id: Ibc1c693a1531c791ad434ff56ee349ba3afb3d63
Closes-Bug: #1649443<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
CONF.import_opt('provider_config_dir', 'karbor.services.protection.provider')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
Fix loading 'provider_config_dir' opt error
When run unit test using 'ostestr --pdb' command. it may get
an error that can not find the config opt 'provider_config_dir'.
Change-Id: Ibc1c693a1531c791ad434ff56ee349ba3afb3d63
Closes-Bug: #1649443# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
CONF.import_opt('provider_config_dir', 'karbor.services.protection.provider')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
<commit_msg>Fix loading 'provider_config_dir' opt error
When run unit test using 'ostestr --pdb' command. it may get
an error that can not find the config opt 'provider_config_dir'.
Change-Id: Ibc1c693a1531c791ad434ff56ee349ba3afb3d63
Closes-Bug: #1649443<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
CONF.import_opt('provider_config_dir', 'karbor.services.protection.provider')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
|
117c3e6c1f301c4e5c07e22b3c76f330b18ea36e | bin/create_contour_data.py | bin/create_contour_data.py | #!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
filepath_out = os.path.join(DATA_DIR, 'contours_' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
# create_all() | #!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
assert os.path.exists(os.path.join(DATA_DIR, 'contours/'))
filepath_out = os.path.join(DATA_DIR, 'contours/' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
test_config.print_bounding_box()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
# contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
if __name__ == "__main__":
test()
# create_all() | Create tiles in create contour command | Create tiles in create contour command
| Python | mit | bartromgens/nsmaps,bartromgens/nsmaps,bartromgens/nsmaps | #!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
filepath_out = os.path.join(DATA_DIR, 'contours_' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
# create_all()Create tiles in create contour command | #!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
assert os.path.exists(os.path.join(DATA_DIR, 'contours/'))
filepath_out = os.path.join(DATA_DIR, 'contours/' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
test_config.print_bounding_box()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
# contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
if __name__ == "__main__":
test()
# create_all() | <commit_before>#!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
filepath_out = os.path.join(DATA_DIR, 'contours_' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
# create_all()<commit_msg>Create tiles in create contour command<commit_after> | #!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
assert os.path.exists(os.path.join(DATA_DIR, 'contours/'))
filepath_out = os.path.join(DATA_DIR, 'contours/' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
test_config.print_bounding_box()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
# contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
if __name__ == "__main__":
test()
# create_all() | #!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
filepath_out = os.path.join(DATA_DIR, 'contours_' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
# create_all()Create tiles in create contour command#!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
assert os.path.exists(os.path.join(DATA_DIR, 'contours/'))
filepath_out = os.path.join(DATA_DIR, 'contours/' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
test_config.print_bounding_box()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
# contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
if __name__ == "__main__":
test()
# create_all() | <commit_before>#!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
filepath_out = os.path.join(DATA_DIR, 'contours_' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
if __name__ == "__main__":
test()
# create_all()<commit_msg>Create tiles in create contour command<commit_after>#!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
DATA_DIR = './website/nsmaps-data'
def test():
stations = nsmaps.station.Stations(DATA_DIR)
departure_station_name = 'Utrecht Centraal'
departure_station = stations.find_station(departure_station_name)
assert os.path.exists(os.path.join(DATA_DIR, 'contours/'))
filepath_out = os.path.join(DATA_DIR, 'contours/' + departure_station.get_code() + '.geojson')
test_config = nsmaps.contourmap.ContourPlotConfig()
test_config.print_bounding_box()
contourmap = nsmaps.contourmap.Contour(departure_station, stations, test_config, DATA_DIR)
# contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
def create_all():
stations = nsmaps.station.Stations(DATA_DIR)
# test_config = nsmaps.contourmap.TestConfig()
config = nsmaps.contourmap.ContourPlotConfig()
for station in stations:
if station.has_travel_time_data():
contourmap = nsmaps.contourmap.Contour(station, stations, config, DATA_DIR)
contourmap.create_contour_data(filepath_out)
contourmap.create_geojson_tiles(filepath_out)
if __name__ == "__main__":
test()
# create_all() |
5dc6488f5d7d0eb1d78b9c2edbb61b177cec6109 | run.py | run.py | #!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
| #!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
#os.environ['APP_SETTINGS'] = 'config.DebugProductionConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
| Add line to easily uncomment and switch back and forth to production settings locally | Add line to easily uncomment and switch back and forth to production settings locally
| Python | agpl-3.0 | paulocheque/guides-cms,pluralsight/guides-cms,paulocheque/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms | #!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
Add line to easily uncomment and switch back and forth to production settings locally | #!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
#os.environ['APP_SETTINGS'] = 'config.DebugProductionConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
| <commit_before>#!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
<commit_msg>Add line to easily uncomment and switch back and forth to production settings locally<commit_after> | #!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
#os.environ['APP_SETTINGS'] = 'config.DebugProductionConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
| #!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
Add line to easily uncomment and switch back and forth to production settings locally#!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
#os.environ['APP_SETTINGS'] = 'config.DebugProductionConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
| <commit_before>#!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
<commit_msg>Add line to easily uncomment and switch back and forth to production settings locally<commit_after>#!/usr/bin/env python
import os
os.environ['APP_SETTINGS'] = 'config.DevelopmentConfig'
#os.environ['APP_SETTINGS'] = 'config.DebugProductionConfig'
from pskb_website import app
# Uncomment to see the config you're running with
#for key, value in app.config.iteritems():
#print key, value
app.run()
|
e2541a9de3b4239f8f3cb7cc06dd9e7f48dd18a9 | objectTopGroup.py | objectTopGroup.py | #**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if rs.coercerhinoobject(ele).Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
| #**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if ele.Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
| Return the top most group name of an object | Return the top most group name of an object | Python | unlicense | stgeorges/pythonscripts | #**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if rs.coercerhinoobject(ele).Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
Return the top most group name of an object | #**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if ele.Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
| <commit_before>#**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if rs.coercerhinoobject(ele).Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
<commit_msg>Return the top most group name of an object<commit_after> | #**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if ele.Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
| #**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if rs.coercerhinoobject(ele).Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
Return the top most group name of an object#**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if ele.Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
| <commit_before>#**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if rs.coercerhinoobject(ele).Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
<commit_msg>Return the top most group name of an object<commit_after>#**********************************************************************************************#
#********* Return the top most group name of an object ****************************************#
#********* by Djordje Spasic ******************************************************************#
#********* issworld2000@yahoo.com 6-May-2014 **************************************************#
"""
This small function replicates the "ObjectTopGroup" RhinoScript function, which still hasn't been implemented
into PythonScript.
Returns the top most group name that an object is assigned. This function primarily applies to objects that are
members of nested groups.
"""
import rhinoscriptsyntax as rs
import scriptcontext as sc
def objectTopGroup(_id):
groupNames = sc.doc.Groups.GroupNames(False)
groupName = False
for i in range(rs.GroupCount()):
groupRO = sc.doc.Groups.GroupMembers(i)
for ele in groupRO:
if ele.Id == _id:
groupName = groupNames[i]
if groupName:
print groupName
else:
print "The element you chose does not belong to any group"
id = rs.GetObject()
objectTopGroup(id)
|
a28c3e9614cc8ab82ed0d1796d68a5b03906f801 | seleniumbase/config/proxy_list.py | seleniumbase/config/proxy_list.py | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "45.133.182.18:18080", # (Example) - set your own proxy here
"example2": "95.174.67.50:18080", # (Example) - set your own proxy here
"example3": "83.97.23.90:18080", # (Example) - set your own proxy here
"example4": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "212.87.220.2:3128", # (Example) - set your own proxy here
"example2": "51.75.147.44:3128", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| Update the sample proxy list | Update the sample proxy list
| Python | mit | mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "45.133.182.18:18080", # (Example) - set your own proxy here
"example2": "95.174.67.50:18080", # (Example) - set your own proxy here
"example3": "83.97.23.90:18080", # (Example) - set your own proxy here
"example4": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
Update the sample proxy list | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "212.87.220.2:3128", # (Example) - set your own proxy here
"example2": "51.75.147.44:3128", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| <commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "45.133.182.18:18080", # (Example) - set your own proxy here
"example2": "95.174.67.50:18080", # (Example) - set your own proxy here
"example3": "83.97.23.90:18080", # (Example) - set your own proxy here
"example4": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
<commit_msg>Update the sample proxy list<commit_after> | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "212.87.220.2:3128", # (Example) - set your own proxy here
"example2": "51.75.147.44:3128", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "45.133.182.18:18080", # (Example) - set your own proxy here
"example2": "95.174.67.50:18080", # (Example) - set your own proxy here
"example3": "83.97.23.90:18080", # (Example) - set your own proxy here
"example4": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
Update the sample proxy list"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "212.87.220.2:3128", # (Example) - set your own proxy here
"example2": "51.75.147.44:3128", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| <commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "45.133.182.18:18080", # (Example) - set your own proxy here
"example2": "95.174.67.50:18080", # (Example) - set your own proxy here
"example3": "83.97.23.90:18080", # (Example) - set your own proxy here
"example4": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
<commit_msg>Update the sample proxy list<commit_after>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "212.87.220.2:3128", # (Example) - set your own proxy here
"example2": "51.75.147.44:3128", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
|
9d59bca61b2836e7db3c50d5558a46aa2dbaea08 | tests/run_tests.py | tests/run_tests.py | #! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_elastic_moduli import *
###
unittest.main()
| #! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_crystal_crack import *
from cubic_elastic_moduli import *
###
unittest.main()
| Add crack test to test runner. | Add crack test to test runner.
| Python | lgpl-2.1 | libAtoms/matscipy,libAtoms/matscipy,libAtoms/matscipy,libAtoms/matscipy | #! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_elastic_moduli import *
###
unittest.main()
Add crack test to test runner. | #! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_crystal_crack import *
from cubic_elastic_moduli import *
###
unittest.main()
| <commit_before>#! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_elastic_moduli import *
###
unittest.main()
<commit_msg>Add crack test to test runner.<commit_after> | #! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_crystal_crack import *
from cubic_elastic_moduli import *
###
unittest.main()
| #! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_elastic_moduli import *
###
unittest.main()
Add crack test to test runner.#! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_crystal_crack import *
from cubic_elastic_moduli import *
###
unittest.main()
| <commit_before>#! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_elastic_moduli import *
###
unittest.main()
<commit_msg>Add crack test to test runner.<commit_after>#! /usr/bin/env python
# ======================================================================
# matscipy - Python materials science tools
# https://github.com/libAtoms/matscipy
#
# Copyright (2014) James Kermode, King's College London
# Lars Pastewka, Karlsruhe Institute of Technology
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
import unittest
from cubic_crystal_crack import *
from cubic_elastic_moduli import *
###
unittest.main()
|
6d942a84da5f9a07ea1fac96ec0667ded623be60 | tests/test_util.py | tests/test_util.py | import unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
mock_urlopen.assert_called()
mock_copyfileobj.assert_called()
if __name__ == '__main__':
unittest.main()
| import unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
if __name__ == '__main__':
unittest.main()
| Remove assert_called for Python 3.5 compatibility | fix: Remove assert_called for Python 3.5 compatibility
| Python | mit | chezou/tabula-py | import unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
mock_urlopen.assert_called()
mock_copyfileobj.assert_called()
if __name__ == '__main__':
unittest.main()
fix: Remove assert_called for Python 3.5 compatibility | import unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
mock_urlopen.assert_called()
mock_copyfileobj.assert_called()
if __name__ == '__main__':
unittest.main()
<commit_msg>fix: Remove assert_called for Python 3.5 compatibility<commit_after> | import unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
if __name__ == '__main__':
unittest.main()
| import unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
mock_urlopen.assert_called()
mock_copyfileobj.assert_called()
if __name__ == '__main__':
unittest.main()
fix: Remove assert_called for Python 3.5 compatibilityimport unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
mock_urlopen.assert_called()
mock_copyfileobj.assert_called()
if __name__ == '__main__':
unittest.main()
<commit_msg>fix: Remove assert_called for Python 3.5 compatibility<commit_after>import unittest
import tabula
try:
FileNotFoundError
from unittest.mock import patch, MagicMock
from urllib.request import Request
except NameError:
FileNotFoundError = IOError
from mock import patch, MagicMock
from urllib2 import Request
class TestUtil(unittest.TestCase):
def test_environment_info(self):
self.assertEqual(tabula.environment_info(), None)
@patch('tabula.file_util.shutil.copyfileobj')
@patch('tabula.file_util.urlopen')
@patch('tabula.file_util._create_request')
def test_localize_file_with_user_agent(self, mock_fun, mock_urlopen, mock_copyfileobj):
uri = "https://github.com/tabulapdf/tabula-java/raw/master/src/test/resources/technology/tabula/12s0324.pdf"
user_agent='Mozilla/5.0'
cm = MagicMock()
cm.getcode.return_value = 200
cm.read.return_value = b'contents'
cm.geturl.return_value = uri
mock_urlopen.return_value = cm
tabula.file_util.localize_file(uri, user_agent=user_agent)
mock_fun.assert_called_with(uri, user_agent)
if __name__ == '__main__':
unittest.main()
|
5d4210ceb34773dffce7d0bb27f38115bb8e1a9f | tests/testcases.py | tests/testcases.py | from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
| from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if isinstance(i['Tag'], basestring) and 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
| Fix tests when there is an image with int tag | Fix tests when there is an image with int tag
| Python | apache-2.0 | mosquito/docker-compose,thaJeztah/docker.github.io,thaJeztah/docker.github.io,bsmr-docker/compose,gdevillele/docker.github.io,BSWANG/denverdino.github.io,jiekechoo/compose,jgrowl/compose,docker/docker.github.io,brunocascio/compose,ZJaffee/compose,gdevillele/docker.github.io,LuisBosquez/docker.github.io,kojiromike/compose,Katlean/fig,dockerhn/compose,ekristen/compose,alexisbellido/docker.github.io,charleswhchan/compose,unodba/compose,screwgoth/compose,unodba/compose,RobertNorthard/compose,rillig/docker.github.io,vlajos/compose,michael-k/docker-compose,joaofnfernandes/docker.github.io,shin-/compose,lukemarsden/compose,twitherspoon/compose,johnstep/docker.github.io,funkyfuture/docker-compose,aduermael/docker.github.io,bdwill/docker.github.io,viranch/compose,vlajos/compose,bdwill/docker.github.io,johnstep/docker.github.io,mark-adams/compose,mchasal/compose,mnuessler/compose,jgrowl/compose,Katlean/fig,bdwill/docker.github.io,alunduil/fig,shin-/docker.github.io,aduermael/docker.github.io,simonista/compose,au-phiware/compose,joeuo/docker.github.io,ralphtheninja/compose,londoncalling/docker.github.io,runcom/compose,genki/compose,jorgeLuizChaves/compose,nerro/compose,londoncalling/docker.github.io,ouziel-slama/compose,saada/compose,docker-zh/docker.github.io,prologic/compose,ouziel-slama/compose,j-fuentes/compose,bcicen/fig,screwgoth/compose,glogiotatidis/compose,docker/docker.github.io,dilgerma/compose,marcusmartins/compose,vdemeester/compose,jonaseck2/compose,ralphtheninja/compose,phiroict/docker,dbdd4us/compose,TheDataShed/compose,joaofnfernandes/docker.github.io,londoncalling/docker.github.io,ain/compose,amitsaha/compose,denverdino/denverdino.github.io,ionrock/compose,mdaue/compose,mnowster/compose,nerro/compose,iamluc/compose,artemkaint/compose,benhamill/compose,feelobot/compose,j-fuentes/compose,sebglazebrook/compose,au-phiware/compose,mohitsoni/compose,dockerhn/compose,DoubleMalt/compose,ChrisChinchilla/compose,mindaugasrukas/compose,jeanpralo/
compose,tiry/compose,sebglazebrook/compose,albers/compose,shubheksha/docker.github.io,dnephin/compose,zhangspook/compose,joaofnfernandes/docker.github.io,denverdino/docker.github.io,Yelp/docker-compose,VinceBarresi/compose,shakamunyi/fig,bdwill/docker.github.io,anweiss/docker.github.io,jzwlqx/denverdino.github.io,viranch/compose,KalleDK/compose,ph-One/compose,bdwill/docker.github.io,VinceBarresi/compose,feelobot/compose,JimGalasyn/docker.github.io,joaofnfernandes/docker.github.io,thaJeztah/docker.github.io,cclauss/compose,jessekl/compose,thieman/compose,jiekechoo/compose,BSWANG/denverdino.github.io,talolard/compose,JimGalasyn/docker.github.io,anweiss/docker.github.io,heroku/fig,denverdino/docker.github.io,ph-One/compose,alunduil/fig,bbirand/compose,anweiss/docker.github.io,jessekl/compose,denverdino/denverdino.github.io,dopry/compose,rstacruz/compose,goloveychuk/compose,heroku/fig,philwrenn/compose,albers/compose,denverdino/docker.github.io,sanscontext/docker.github.io,charleswhchan/compose,xydinesh/compose,lmesz/compose,sanscontext/docker.github.io,rgbkrk/compose,bfirsh/fig,dnephin/compose,shubheksha/docker.github.io,lmesz/compose,shin-/docker.github.io,jrabbit/compose,Chouser/compose,tpounds/compose,MSakamaki/compose,swoopla/compose,hypriot/compose,andrewgee/compose,mdaue/compose,jzwlqx/denverdino.github.io,londoncalling/docker.github.io,phiroict/docker,shin-/docker.github.io,kojiromike/compose,xydinesh/compose,mrfuxi/compose,uvgroovy/compose,thaJeztah/compose,docker/docker.github.io,JimGalasyn/docker.github.io,denverdino/denverdino.github.io,aduermael/docker.github.io,iamluc/compose,uvgroovy/compose,rgbkrk/compose,menglingwei/denverdino.github.io,bcicen/fig,danix800/docker.github.io,browning/compose,qzio/compose,aanand/fig,michael-k/docker-compose,DoubleMalt/compose,shin-/docker.github.io,noironetworks/compose,prologic/compose,mbailey/compose,pspierce/compose,joeuo/docker.github.io,bobphill/compose,cclauss/compose,Dakno/compose,menglingwei/denverdino.github.io,ab
esto/fig,rstacruz/compose,hoogenm/compose,sanscontext/docker.github.io,BSWANG/denverdino.github.io,BSWANG/denverdino.github.io,bfirsh/fig,johnstep/docker.github.io,gdevillele/docker.github.io,simonista/compose,joeuo/docker.github.io,sanscontext/docker.github.io,tangkun75/compose,schmunk42/compose,ekristen/compose,thaJeztah/compose,denverdino/compose,johnstep/docker.github.io,artemkaint/compose,Dakno/compose,twitherspoon/compose,jonaseck2/compose,phiroict/docker,menglingwei/denverdino.github.io,ionrock/compose,bsmr-docker/compose,LuisBosquez/docker.github.io,alexandrev/compose,funkyfuture/docker-compose,aanand/fig,GM-Alex/compose,KevinGreene/compose,denverdino/docker.github.io,danix800/docker.github.io,swoopla/compose,denverdino/denverdino.github.io,nhumrich/compose,TheDataShed/compose,gdevillele/docker.github.io,kikkomep/compose,mosquito/docker-compose,mindaugasrukas/compose,nhumrich/compose,mnowster/compose,schmunk42/compose,denverdino/docker.github.io,GM-Alex/compose,tangkun75/compose,Chouser/compose,jrabbit/compose,MSakamaki/compose,genki/compose,mark-adams/compose,browning/compose,docker-zh/docker.github.io,cgvarela/compose,tiry/compose,dbdd4us/compose,shubheksha/docker.github.io,ain/compose,calou/compose,KevinGreene/compose,benhamill/compose,docker/docker.github.io,ggtools/compose,alexisbellido/docker.github.io,danix800/docker.github.io,heroku/fig,ZJaffee/compose,talolard/compose,JimGalasyn/docker.github.io,ggtools/compose,thaJeztah/docker.github.io,Yelp/docker-compose,shakamunyi/fig,docker-zh/docker.github.io,sanscontext/docker.github.io,noironetworks/compose,JimGalasyn/docker.github.io,rillig/docker.github.io,TomasTomecek/compose,thaJeztah/docker.github.io,joeuo/docker.github.io,sdurrheimer/compose,shin-/docker.github.io,vdemeester/compose,mchasal/compose,josephpage/compose,shubheksha/docker.github.io,tpounds/compose,hoogenm/compose,anweiss/docker.github.io,troy0820/docker.github.io,goloveychuk/compose,rillig/docker.github.io,calou/compose,qzio/compose,gtrdot
mcs/compose,shubheksha/docker.github.io,philwrenn/compose,jzwlqx/denverdino.github.io,alexisbellido/docker.github.io,moxiegirl/compose,bbirand/compose,andrewgee/compose,marcusmartins/compose,shin-/compose,troy0820/docker.github.io,jorgeLuizChaves/compose,hypriot/compose,joeuo/docker.github.io,LuisBosquez/docker.github.io,menglingwei/denverdino.github.io,LuisBosquez/docker.github.io,dilgerma/compose,kikkomep/compose,phiroict/docker,menglingwei/denverdino.github.io,mnuessler/compose,bobphill/compose,troy0820/docker.github.io,gtrdotmcs/compose,gdevillele/docker.github.io,lukemarsden/compose,danix800/docker.github.io,cgvarela/compose,phiroict/docker,troy0820/docker.github.io,aduermael/docker.github.io,mbailey/compose,josephpage/compose,glogiotatidis/compose,anweiss/docker.github.io,LuisBosquez/docker.github.io,docker-zh/docker.github.io,runcom/compose,jzwlqx/denverdino.github.io,thieman/compose,mohitsoni/compose,d2bit/compose,joaofnfernandes/docker.github.io,jzwlqx/denverdino.github.io,TomasTomecek/compose,ChrisChinchilla/compose,alexisbellido/docker.github.io,denverdino/denverdino.github.io,saada/compose,alexisbellido/docker.github.io,bcicen/fig,BSWANG/denverdino.github.io,KalleDK/compose,johnstep/docker.github.io,docker/docker.github.io,abesto/fig,brunocascio/compose,zhangspook/compose,rillig/docker.github.io,RobertNorthard/compose,docker-zh/docker.github.io,moxiegirl/compose,amitsaha/compose,pspierce/compose,alexandrev/compose,d2bit/compose,londoncalling/docker.github.io,sdurrheimer/compose,denverdino/compose,mrfuxi/compose,dopry/compose,jeanpralo/compose | from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
Fix tests when there is an image with int tag | from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if isinstance(i['Tag'], basestring) and 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
| <commit_before>from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
<commit_msg>Fix tests when there is an image with int tag<commit_after> | from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if isinstance(i['Tag'], basestring) and 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
| from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
Fix tests when there is an image with int tagfrom __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if isinstance(i['Tag'], basestring) and 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
| <commit_before>from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
<commit_msg>Fix tests when there is an image with int tag<commit_after>from __future__ import unicode_literals
from __future__ import absolute_import
from fig.packages.docker import Client
from fig.service import Service
from fig.cli.utils import docker_url
from . import unittest
class DockerClientTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = Client(docker_url())
cls.client.pull('ubuntu', tag='latest')
def setUp(self):
for c in self.client.containers(all=True):
if c['Names'] and 'figtest' in c['Names'][0]:
self.client.kill(c['Id'])
self.client.remove_container(c['Id'])
for i in self.client.images():
if isinstance(i['Tag'], basestring) and 'figtest' in i['Tag']:
self.client.remove_image(i)
def create_service(self, name, **kwargs):
return Service(
project='figtest',
name=name,
client=self.client,
image="ubuntu",
command=["/bin/sleep", "300"],
**kwargs
)
|
8c97ffed1531315dd50639c40b0bccad0fc1ef2d | textual_runtime.py | textual_runtime.py | # Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
| # Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
elif tokens[0].isdigit():
col_index = int(tokens[0])
new_point = self.game.try_turn(self.game.current_player, col_index)
| Add ability to drop discs on slots | Add ability to drop discs on slots
| Python | mit | misterwilliam/connect-four | # Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
Add ability to drop discs on slots | # Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
elif tokens[0].isdigit():
col_index = int(tokens[0])
new_point = self.game.try_turn(self.game.current_player, col_index)
| <commit_before># Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
<commit_msg>Add ability to drop discs on slots<commit_after> | # Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
elif tokens[0].isdigit():
col_index = int(tokens[0])
new_point = self.game.try_turn(self.game.current_player, col_index)
| # Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
Add ability to drop discs on slots# Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
elif tokens[0].isdigit():
col_index = int(tokens[0])
new_point = self.game.try_turn(self.game.current_player, col_index)
| <commit_before># Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
<commit_msg>Add ability to drop discs on slots<commit_after># Runtime for managing the interactive component of the game. Allows user to play the game
# through a text based interface.
from game import DiscState
class TextualRuntime:
def __init__(self, game):
self.game = game
self.state = {
"continue": True
}
def start(self):
while self.state["continue"]:
self.render()
self.eval(self.get_input())
def render(self):
str_repr = ["Current board state:\n"]
str_repr += [" %i " % col_index for col_index in range(self.game.grid.width)] + ["\n"]
for row in self.game.grid:
row_repr = []
for disc_value in row:
if disc_value is DiscState.empty:
row_repr.append("| |")
elif disc_value is DiscState.red:
row_repr.append("|O|")
else: # disc_value is black
row_repr.append("|X|")
row_repr.append("\n")
str_repr += row_repr
print("".join(str_repr))
def get_input(self):
return input("--> ")
def eval(self, command):
tokens = command.split()
if len(tokens) == 1:
if tokens[0] == "quit":
self.state["continue"] = False
elif tokens[0].isdigit():
col_index = int(tokens[0])
new_point = self.game.try_turn(self.game.current_player, col_index)
|
46c7798003ce2eef60440860cc305372bd73a57d | salt/returners/cassandra_return.py | salt/returners/cassandra_return.py | '''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
'''
Return data to a Cassandra ColumnFamily
'''
consistency_level = getattr(pycassa.ConsistencyLevel,
__opts__['cassandra.consistency_level'])
pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
__opts__['cassandra.servers'])
cf = pycassa.ColumnFamily(pool, __opts__['cassandra.column_family'],
write_consistency_level=consistency_level)
columns = {'fun': ret['fun'],
'id': ret['id']}
if isinstance(ret['return'], dict):
for key, value in ret['return'].iteritems():
columns['return.%s' % (key,)] = str(value)
else:
columns['return'] = str(ret['return'])
log.debug(back)
cf.insert(ret['jid'], columns)
| '''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
'''
Return data to a Cassandra ColumnFamily
'''
consistency_level = getattr(pycassa.ConsistencyLevel,
__opts__['cassandra.consistency_level'])
pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
__opts__['cassandra.servers'])
cf = pycassa.ColumnFamily(pool, __opts__['cassandra.column_family'],
write_consistency_level=consistency_level)
columns = {'fun': ret['fun'],
'id': ret['id']}
if isinstance(ret['return'], dict):
for key, value in ret['return'].iteritems():
columns['return.%s' % (key,)] = str(value)
else:
columns['return'] = str(ret['return'])
log.debug(columns)
cf.insert(ret['jid'], columns)
| Debug statement used the wrong variable. | Debug statement used the wrong variable.
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | '''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
'''
Return data to a Cassandra ColumnFamily
'''
consistency_level = getattr(pycassa.ConsistencyLevel,
__opts__['cassandra.consistency_level'])
pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
__opts__['cassandra.servers'])
cf = pycassa.ColumnFamily(pool, __opts__['cassandra.column_family'],
write_consistency_level=consistency_level)
columns = {'fun': ret['fun'],
'id': ret['id']}
if isinstance(ret['return'], dict):
for key, value in ret['return'].iteritems():
columns['return.%s' % (key,)] = str(value)
else:
columns['return'] = str(ret['return'])
log.debug(back)
cf.insert(ret['jid'], columns)
Debug statement used the wrong variable. | '''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
    '''
    Write a salt job return to the configured Cassandra column family.
    '''
    consistency = getattr(pycassa.ConsistencyLevel,
                          __opts__['cassandra.consistency_level'])
    pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
                                  __opts__['cassandra.servers'])
    column_family = pycassa.ColumnFamily(
        pool,
        __opts__['cassandra.column_family'],
        write_consistency_level=consistency)
    columns = {'fun': ret['fun'], 'id': ret['id']}
    ret_data = ret['return']
    if isinstance(ret_data, dict):
        # One column per key of a dict return.
        for key, value in ret_data.iteritems():
            columns['return.%s' % (key,)] = str(value)
    else:
        columns['return'] = str(ret_data)
    log.debug(columns)
    column_family.insert(ret['jid'], columns)
| <commit_before>'''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
    '''
    Write a salt job return to the configured Cassandra column family.

    ``ret`` is the salt return payload; the keys ``fun``, ``id``, ``jid``
    and ``return`` are expected.  One row is inserted, keyed by the job id.
    '''
    consistency_level = getattr(pycassa.ConsistencyLevel,
                                __opts__['cassandra.consistency_level'])
    pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
                                  __opts__['cassandra.servers'])
    cf = pycassa.ColumnFamily(pool, __opts__['cassandra.column_family'],
                              write_consistency_level=consistency_level)
    columns = {'fun': ret['fun'],
               'id': ret['id']}
    if isinstance(ret['return'], dict):
        # Flatten a dict return into one column per key.
        for key, value in ret['return'].iteritems():
            columns['return.%s' % (key,)] = str(value)
    else:
        columns['return'] = str(ret['return'])
    # Bug fix: log the columns about to be written; ``back`` was an
    # undefined name and raised NameError here.
    log.debug(columns)
    cf.insert(ret['jid'], columns)
<commit_msg>Debug statement used the wrong variable.<commit_after> | '''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
    '''
    Write a salt job return to the configured Cassandra column family.
    '''
    consistency = getattr(pycassa.ConsistencyLevel,
                          __opts__['cassandra.consistency_level'])
    pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
                                  __opts__['cassandra.servers'])
    column_family = pycassa.ColumnFamily(
        pool,
        __opts__['cassandra.column_family'],
        write_consistency_level=consistency)
    columns = {'fun': ret['fun'], 'id': ret['id']}
    ret_data = ret['return']
    if isinstance(ret_data, dict):
        # One column per key of a dict return.
        for key, value in ret_data.iteritems():
            columns['return.%s' % (key,)] = str(value)
    else:
        columns['return'] = str(ret_data)
    log.debug(columns)
    column_family.insert(ret['jid'], columns)
| '''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
    '''
    Write a salt job return to the configured Cassandra column family.

    ``ret`` is the salt return payload; the keys ``fun``, ``id``, ``jid``
    and ``return`` are expected.  One row is inserted, keyed by the job id.
    '''
    consistency_level = getattr(pycassa.ConsistencyLevel,
                                __opts__['cassandra.consistency_level'])
    pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
                                  __opts__['cassandra.servers'])
    cf = pycassa.ColumnFamily(pool, __opts__['cassandra.column_family'],
                              write_consistency_level=consistency_level)
    columns = {'fun': ret['fun'],
               'id': ret['id']}
    if isinstance(ret['return'], dict):
        # Flatten a dict return into one column per key.
        for key, value in ret['return'].iteritems():
            columns['return.%s' % (key,)] = str(value)
    else:
        columns['return'] = str(ret['return'])
    # Bug fix: log the columns about to be written; ``back`` was an
    # undefined name and raised NameError here.
    log.debug(columns)
    cf.insert(ret['jid'], columns)
Debug statement used the wrong variable.'''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
    '''
    Write a salt job return to the configured Cassandra column family.
    '''
    consistency = getattr(pycassa.ConsistencyLevel,
                          __opts__['cassandra.consistency_level'])
    pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
                                  __opts__['cassandra.servers'])
    column_family = pycassa.ColumnFamily(
        pool,
        __opts__['cassandra.column_family'],
        write_consistency_level=consistency)
    columns = {'fun': ret['fun'], 'id': ret['id']}
    ret_data = ret['return']
    if isinstance(ret_data, dict):
        # One column per key of a dict return.
        for key, value in ret_data.iteritems():
            columns['return.%s' % (key,)] = str(value)
    else:
        columns['return'] = str(ret_data)
    log.debug(columns)
    column_family.insert(ret['jid'], columns)
| <commit_before>'''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
    '''
    Write a salt job return to the configured Cassandra column family.

    ``ret`` is the salt return payload; the keys ``fun``, ``id``, ``jid``
    and ``return`` are expected.  One row is inserted, keyed by the job id.
    '''
    consistency_level = getattr(pycassa.ConsistencyLevel,
                                __opts__['cassandra.consistency_level'])
    pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
                                  __opts__['cassandra.servers'])
    cf = pycassa.ColumnFamily(pool, __opts__['cassandra.column_family'],
                              write_consistency_level=consistency_level)
    columns = {'fun': ret['fun'],
               'id': ret['id']}
    if isinstance(ret['return'], dict):
        # Flatten a dict return into one column per key.
        for key, value in ret['return'].iteritems():
            columns['return.%s' % (key,)] = str(value)
    else:
        columns['return'] = str(ret['return'])
    # Bug fix: log the columns about to be written; ``back`` was an
    # undefined name and raised NameError here.
    log.debug(columns)
    cf.insert(ret['jid'], columns)
<commit_msg>Debug statement used the wrong variable.<commit_after>'''
Return data to a Cassandra ColumFamily
Here's an example Keyspace/ColumnFamily setup that works with this
returner::
create keyspace salt;
use salt;
create column family returns
with key_validation_class='UTF8Type'
and comparator='UTF8Type'
and default_validation_class='UTF8Type';
'''
import logging
import pycassa
log = logging.getLogger(__name__)
__opts__ = {'cassandra.servers': ['localhost:9160'],
'cassandra.keyspace': 'salt',
'cassandra.column_family': 'returns',
'cassandra.consistency_level': 'ONE'}
def returner(ret):
    '''
    Write a salt job return to the configured Cassandra column family.
    '''
    consistency = getattr(pycassa.ConsistencyLevel,
                          __opts__['cassandra.consistency_level'])
    pool = pycassa.ConnectionPool(__opts__['cassandra.keyspace'],
                                  __opts__['cassandra.servers'])
    column_family = pycassa.ColumnFamily(
        pool,
        __opts__['cassandra.column_family'],
        write_consistency_level=consistency)
    columns = {'fun': ret['fun'], 'id': ret['id']}
    ret_data = ret['return']
    if isinstance(ret_data, dict):
        # One column per key of a dict return.
        for key, value in ret_data.iteritems():
            columns['return.%s' % (key,)] = str(value)
    else:
        columns['return'] = str(ret_data)
    log.debug(columns)
    column_family.insert(ret['jid'], columns)
|
566739e88098eb40da26bd0930ac2d65ffdb999c | src/nyc_trees/apps/core/helpers.py | src/nyc_trees/apps/core/helpers.py |
def user_is_census_admin(user):
    """True-ish when *user* is logged in and flagged as a census admin."""
    authenticated = user.is_authenticated()
    return authenticated and user.is_census_admin
def user_is_group_admin(user, group):
    """True-ish when *user* may administer *group*.

    Census admins qualify for every group; otherwise the user must be the
    group's own admin.
    """
    if not user.is_authenticated():
        return False
    return user.is_census_admin or group.admin == user
def user_has_online_training(user):
    """True-ish when a logged-in *user* has finished the online training."""
    authenticated = user.is_authenticated()
    return authenticated and user.online_training_complete
def user_has_field_training(user):
    """True-ish when a logged-in *user* has finished the field training."""
    authenticated = user.is_authenticated()
    return authenticated and user.field_training_complete
def user_is_individual_mapper(user):
    """True-ish when a logged-in *user* holds individual-mapper status."""
    authenticated = user.is_authenticated()
    return authenticated and user.individual_mapper
| # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from apps.users.models import TrustedMapper
def user_is_census_admin(user):
    """True-ish when *user* is logged in and flagged as a census admin."""
    authenticated = user.is_authenticated()
    return authenticated and user.is_census_admin
def user_is_group_admin(user, group):
    """True-ish when *user* may administer *group*.

    Census admins qualify for every group; otherwise the user must be the
    group's own admin.
    """
    if not user.is_authenticated():
        return False
    return user.is_census_admin or group.admin == user
def user_has_online_training(user):
    """True-ish when a logged-in *user* has finished the online training."""
    authenticated = user.is_authenticated()
    return authenticated and user.online_training_complete
def user_has_field_training(user):
    """True-ish when a logged-in *user* has finished the field training."""
    authenticated = user.is_authenticated()
    return authenticated and user.field_training_complete
def user_is_individual_mapper(user):
    """True-ish when a logged-in *user* holds individual-mapper status."""
    authenticated = user.is_authenticated()
    return authenticated and user.individual_mapper
def user_is_trusted_mapper(user, group):
    """True when *user* is logged in and holds an approved TrustedMapper
    record for *group*."""
    if not user.is_authenticated():
        return False
    approved = TrustedMapper.objects.filter(group=group,
                                            user=user,
                                            is_approved=True)
    return approved.exists()
| Hide "Request Individual Mapper Status" button if approved | Hide "Request Individual Mapper Status" button if approved
There's no point to showing this button once you have been approved as
an individual mapper for this group.
| Python | agpl-3.0 | RickMohr/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,RickMohr/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,kdeloach/nyc-trees,maurizi/nyc-trees,RickMohr/nyc-trees,maurizi/nyc-trees,RickMohr/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees |
def user_is_census_admin(user):
return user.is_authenticated() and user.is_census_admin
def user_is_group_admin(user, group):
return user.is_authenticated() and (user.is_census_admin or
group.admin == user)
def user_has_online_training(user):
return user.is_authenticated() and user.online_training_complete
def user_has_field_training(user):
return user.is_authenticated() and user.field_training_complete
def user_is_individual_mapper(user):
return user.is_authenticated() and user.individual_mapper
Hide "Request Individual Mapper Status" button if approved
There's no point to showing this button once you have been approved as
an individual mapper for this group. | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from apps.users.models import TrustedMapper
def user_is_census_admin(user):
return user.is_authenticated() and user.is_census_admin
def user_is_group_admin(user, group):
return user.is_authenticated() and (user.is_census_admin or
group.admin == user)
def user_has_online_training(user):
return user.is_authenticated() and user.online_training_complete
def user_has_field_training(user):
return user.is_authenticated() and user.field_training_complete
def user_is_individual_mapper(user):
return user.is_authenticated() and user.individual_mapper
def user_is_trusted_mapper(user, group):
return user.is_authenticated() and \
TrustedMapper.objects.filter(group=group,
user=user,
is_approved=True).exists()
| <commit_before>
def user_is_census_admin(user):
return user.is_authenticated() and user.is_census_admin
def user_is_group_admin(user, group):
return user.is_authenticated() and (user.is_census_admin or
group.admin == user)
def user_has_online_training(user):
return user.is_authenticated() and user.online_training_complete
def user_has_field_training(user):
return user.is_authenticated() and user.field_training_complete
def user_is_individual_mapper(user):
return user.is_authenticated() and user.individual_mapper
<commit_msg>Hide "Request Individual Mapper Status" button if approved
There's no point to showing this button once you have been approved as
an individual mapper for this group.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from apps.users.models import TrustedMapper
def user_is_census_admin(user):
return user.is_authenticated() and user.is_census_admin
def user_is_group_admin(user, group):
return user.is_authenticated() and (user.is_census_admin or
group.admin == user)
def user_has_online_training(user):
return user.is_authenticated() and user.online_training_complete
def user_has_field_training(user):
return user.is_authenticated() and user.field_training_complete
def user_is_individual_mapper(user):
return user.is_authenticated() and user.individual_mapper
def user_is_trusted_mapper(user, group):
return user.is_authenticated() and \
TrustedMapper.objects.filter(group=group,
user=user,
is_approved=True).exists()
|
def user_is_census_admin(user):
return user.is_authenticated() and user.is_census_admin
def user_is_group_admin(user, group):
return user.is_authenticated() and (user.is_census_admin or
group.admin == user)
def user_has_online_training(user):
return user.is_authenticated() and user.online_training_complete
def user_has_field_training(user):
return user.is_authenticated() and user.field_training_complete
def user_is_individual_mapper(user):
return user.is_authenticated() and user.individual_mapper
Hide "Request Individual Mapper Status" button if approved
There's no point to showing this button once you have been approved as
an individual mapper for this group.# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from apps.users.models import TrustedMapper
def user_is_census_admin(user):
return user.is_authenticated() and user.is_census_admin
def user_is_group_admin(user, group):
return user.is_authenticated() and (user.is_census_admin or
group.admin == user)
def user_has_online_training(user):
return user.is_authenticated() and user.online_training_complete
def user_has_field_training(user):
return user.is_authenticated() and user.field_training_complete
def user_is_individual_mapper(user):
return user.is_authenticated() and user.individual_mapper
def user_is_trusted_mapper(user, group):
return user.is_authenticated() and \
TrustedMapper.objects.filter(group=group,
user=user,
is_approved=True).exists()
| <commit_before>
def user_is_census_admin(user):
return user.is_authenticated() and user.is_census_admin
def user_is_group_admin(user, group):
return user.is_authenticated() and (user.is_census_admin or
group.admin == user)
def user_has_online_training(user):
return user.is_authenticated() and user.online_training_complete
def user_has_field_training(user):
return user.is_authenticated() and user.field_training_complete
def user_is_individual_mapper(user):
return user.is_authenticated() and user.individual_mapper
<commit_msg>Hide "Request Individual Mapper Status" button if approved
There's no point to showing this button once you have been approved as
an individual mapper for this group.<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from apps.users.models import TrustedMapper
def user_is_census_admin(user):
return user.is_authenticated() and user.is_census_admin
def user_is_group_admin(user, group):
return user.is_authenticated() and (user.is_census_admin or
group.admin == user)
def user_has_online_training(user):
return user.is_authenticated() and user.online_training_complete
def user_has_field_training(user):
return user.is_authenticated() and user.field_training_complete
def user_is_individual_mapper(user):
return user.is_authenticated() and user.individual_mapper
def user_is_trusted_mapper(user, group):
return user.is_authenticated() and \
TrustedMapper.objects.filter(group=group,
user=user,
is_approved=True).exists()
|
0e2635c01205c3359c3eabf7369453c3886c304a | photo/qt/overviewWindow.py | photo/qt/overviewWindow.py | """An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
    """A label showing a scaled-down pixmap of a single image."""

    def __init__(self, image, scale):
        super(ThumbnailWidget, self).__init__()
        full_pixmap = image.getPixmap()
        thumb_size = scale * full_pixmap.size()
        self.setPixmap(full_pixmap.scaled(thumb_size))
class OverviewWindow(QtGui.QMainWindow):
    """Window presenting the viewer's current selection as a thumbnail grid."""

    def __init__(self, imageViewer):
        super(OverviewWindow, self).__init__()
        self.imageViewer = imageViewer
        self.numcolumns = 4
        # Thumbnails are a fifth of the main viewer's scale.
        self.scaleFactor = self.imageViewer.scaleFactor / 5.0
        self.setWindowTitle("Overview")
        self.mainLayout = QtGui.QGridLayout()
        self._populate()
        gridHost = QtGui.QWidget()
        gridHost.setLayout(self.mainLayout)
        scroller = QtGui.QScrollArea()
        scroller.setWidget(gridHost)
        scroller.setAlignment(QtCore.Qt.AlignCenter)
        self.setCentralWidget(scroller)

    def _populate(self):
        """Fill mainLayout with one ThumbnailWidget per selected image.

        FIXME: by now, implement only the initial setup of the image
        list.  Must also support updating the list after changes in
        imageViewer.selection.
        """
        ncol = self.numcolumns
        for index, image in enumerate(self.imageViewer.selection):
            cell = ThumbnailWidget(image, self.scaleFactor)
            self.mainLayout.addWidget(cell, index // ncol, index % ncol)
| """An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
    """A label showing a fixed-size thumbnail of a single image."""

    # Every thumbnail is scaled to fit into this bounding box.
    ThumbnailSize = QtCore.QSize(128, 128)

    def __init__(self, image):
        super(ThumbnailWidget, self).__init__()
        source = image.getPixmap()
        scaled = source.scaled(self.ThumbnailSize, QtCore.Qt.KeepAspectRatio)
        self.setPixmap(scaled)
class OverviewWindow(QtGui.QMainWindow):
    """Window presenting the viewer's current selection as a thumbnail grid."""

    def __init__(self, imageViewer):
        super(OverviewWindow, self).__init__()
        self.imageViewer = imageViewer
        self.numcolumns = 4
        self.setWindowTitle("Overview")
        self.mainLayout = QtGui.QGridLayout()
        self._populate()
        gridHost = QtGui.QWidget()
        gridHost.setLayout(self.mainLayout)
        scroller = QtGui.QScrollArea()
        scroller.setWidget(gridHost)
        scroller.setAlignment(QtCore.Qt.AlignCenter)
        self.setCentralWidget(scroller)

    def _populate(self):
        """Fill mainLayout with one ThumbnailWidget per selected image.

        FIXME: by now, implement only the initial setup of the image
        list.  Must also support updating the list after changes in
        imageViewer.selection.
        """
        ncol = self.numcolumns
        for index, image in enumerate(self.imageViewer.selection):
            cell = ThumbnailWidget(image)
            self.mainLayout.addWidget(cell, index // ncol, index % ncol)
| Use a fixed thumbnail size rather than a relative scale. | Use a fixed thumbnail size rather than a relative scale.
| Python | apache-2.0 | RKrahl/photo-tools | """An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
def __init__(self, image, scale):
super(ThumbnailWidget, self).__init__()
pixmap = image.getPixmap()
size = scale * pixmap.size()
pixmap = pixmap.scaled(size)
self.setPixmap(pixmap)
class OverviewWindow(QtGui.QMainWindow):
def __init__(self, imageViewer):
super(OverviewWindow, self).__init__()
self.imageViewer = imageViewer
self.numcolumns = 4
self.scaleFactor = self.imageViewer.scaleFactor / 5.0
self.setWindowTitle("Overview")
self.mainLayout = QtGui.QGridLayout()
self._populate()
centralWidget = QtGui.QWidget()
centralWidget.setLayout(self.mainLayout)
scrollArea = QtGui.QScrollArea()
scrollArea.setWidget(centralWidget)
scrollArea.setAlignment(QtCore.Qt.AlignCenter)
self.setCentralWidget(scrollArea)
def _populate(self):
"""Populate the mainLayout with thumbnail images.
"""
# FIXME: by now, implement only the initial setup of the image
# list. Must also support updating the list after changes in
# imageViewer.selection.
images = self.imageViewer.selection
ncol = self.numcolumns
c = 0
for i in images:
thumb = ThumbnailWidget(i, self.scaleFactor)
self.mainLayout.addWidget(thumb, c // ncol, c % ncol)
c += 1
Use a fixed thumbnail size rather then a relative scale. | """An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
ThumbnailSize = QtCore.QSize(128, 128)
def __init__(self, image):
super(ThumbnailWidget, self).__init__()
pixmap = image.getPixmap()
pixmap = pixmap.scaled(self.ThumbnailSize, QtCore.Qt.KeepAspectRatio)
self.setPixmap(pixmap)
class OverviewWindow(QtGui.QMainWindow):
def __init__(self, imageViewer):
super(OverviewWindow, self).__init__()
self.imageViewer = imageViewer
self.numcolumns = 4
self.setWindowTitle("Overview")
self.mainLayout = QtGui.QGridLayout()
self._populate()
centralWidget = QtGui.QWidget()
centralWidget.setLayout(self.mainLayout)
scrollArea = QtGui.QScrollArea()
scrollArea.setWidget(centralWidget)
scrollArea.setAlignment(QtCore.Qt.AlignCenter)
self.setCentralWidget(scrollArea)
def _populate(self):
"""Populate the mainLayout with thumbnail images.
"""
# FIXME: by now, implement only the initial setup of the image
# list. Must also support updating the list after changes in
# imageViewer.selection.
images = self.imageViewer.selection
ncol = self.numcolumns
c = 0
for i in images:
thumb = ThumbnailWidget(i)
self.mainLayout.addWidget(thumb, c // ncol, c % ncol)
c += 1
| <commit_before>"""An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
def __init__(self, image, scale):
super(ThumbnailWidget, self).__init__()
pixmap = image.getPixmap()
size = scale * pixmap.size()
pixmap = pixmap.scaled(size)
self.setPixmap(pixmap)
class OverviewWindow(QtGui.QMainWindow):
def __init__(self, imageViewer):
super(OverviewWindow, self).__init__()
self.imageViewer = imageViewer
self.numcolumns = 4
self.scaleFactor = self.imageViewer.scaleFactor / 5.0
self.setWindowTitle("Overview")
self.mainLayout = QtGui.QGridLayout()
self._populate()
centralWidget = QtGui.QWidget()
centralWidget.setLayout(self.mainLayout)
scrollArea = QtGui.QScrollArea()
scrollArea.setWidget(centralWidget)
scrollArea.setAlignment(QtCore.Qt.AlignCenter)
self.setCentralWidget(scrollArea)
def _populate(self):
"""Populate the mainLayout with thumbnail images.
"""
# FIXME: by now, implement only the initial setup of the image
# list. Must also support updating the list after changes in
# imageViewer.selection.
images = self.imageViewer.selection
ncol = self.numcolumns
c = 0
for i in images:
thumb = ThumbnailWidget(i, self.scaleFactor)
self.mainLayout.addWidget(thumb, c // ncol, c % ncol)
c += 1
<commit_msg>Use a fixed thumbnail size rather then a relative scale.<commit_after> | """An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
ThumbnailSize = QtCore.QSize(128, 128)
def __init__(self, image):
super(ThumbnailWidget, self).__init__()
pixmap = image.getPixmap()
pixmap = pixmap.scaled(self.ThumbnailSize, QtCore.Qt.KeepAspectRatio)
self.setPixmap(pixmap)
class OverviewWindow(QtGui.QMainWindow):
def __init__(self, imageViewer):
super(OverviewWindow, self).__init__()
self.imageViewer = imageViewer
self.numcolumns = 4
self.setWindowTitle("Overview")
self.mainLayout = QtGui.QGridLayout()
self._populate()
centralWidget = QtGui.QWidget()
centralWidget.setLayout(self.mainLayout)
scrollArea = QtGui.QScrollArea()
scrollArea.setWidget(centralWidget)
scrollArea.setAlignment(QtCore.Qt.AlignCenter)
self.setCentralWidget(scrollArea)
def _populate(self):
"""Populate the mainLayout with thumbnail images.
"""
# FIXME: by now, implement only the initial setup of the image
# list. Must also support updating the list after changes in
# imageViewer.selection.
images = self.imageViewer.selection
ncol = self.numcolumns
c = 0
for i in images:
thumb = ThumbnailWidget(i)
self.mainLayout.addWidget(thumb, c // ncol, c % ncol)
c += 1
| """An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
def __init__(self, image, scale):
super(ThumbnailWidget, self).__init__()
pixmap = image.getPixmap()
size = scale * pixmap.size()
pixmap = pixmap.scaled(size)
self.setPixmap(pixmap)
class OverviewWindow(QtGui.QMainWindow):
def __init__(self, imageViewer):
super(OverviewWindow, self).__init__()
self.imageViewer = imageViewer
self.numcolumns = 4
self.scaleFactor = self.imageViewer.scaleFactor / 5.0
self.setWindowTitle("Overview")
self.mainLayout = QtGui.QGridLayout()
self._populate()
centralWidget = QtGui.QWidget()
centralWidget.setLayout(self.mainLayout)
scrollArea = QtGui.QScrollArea()
scrollArea.setWidget(centralWidget)
scrollArea.setAlignment(QtCore.Qt.AlignCenter)
self.setCentralWidget(scrollArea)
def _populate(self):
"""Populate the mainLayout with thumbnail images.
"""
# FIXME: by now, implement only the initial setup of the image
# list. Must also support updating the list after changes in
# imageViewer.selection.
images = self.imageViewer.selection
ncol = self.numcolumns
c = 0
for i in images:
thumb = ThumbnailWidget(i, self.scaleFactor)
self.mainLayout.addWidget(thumb, c // ncol, c % ncol)
c += 1
Use a fixed thumbnail size rather then a relative scale."""An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
ThumbnailSize = QtCore.QSize(128, 128)
def __init__(self, image):
super(ThumbnailWidget, self).__init__()
pixmap = image.getPixmap()
pixmap = pixmap.scaled(self.ThumbnailSize, QtCore.Qt.KeepAspectRatio)
self.setPixmap(pixmap)
class OverviewWindow(QtGui.QMainWindow):
def __init__(self, imageViewer):
super(OverviewWindow, self).__init__()
self.imageViewer = imageViewer
self.numcolumns = 4
self.setWindowTitle("Overview")
self.mainLayout = QtGui.QGridLayout()
self._populate()
centralWidget = QtGui.QWidget()
centralWidget.setLayout(self.mainLayout)
scrollArea = QtGui.QScrollArea()
scrollArea.setWidget(centralWidget)
scrollArea.setAlignment(QtCore.Qt.AlignCenter)
self.setCentralWidget(scrollArea)
def _populate(self):
"""Populate the mainLayout with thumbnail images.
"""
# FIXME: by now, implement only the initial setup of the image
# list. Must also support updating the list after changes in
# imageViewer.selection.
images = self.imageViewer.selection
ncol = self.numcolumns
c = 0
for i in images:
thumb = ThumbnailWidget(i)
self.mainLayout.addWidget(thumb, c // ncol, c % ncol)
c += 1
| <commit_before>"""An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
def __init__(self, image, scale):
super(ThumbnailWidget, self).__init__()
pixmap = image.getPixmap()
size = scale * pixmap.size()
pixmap = pixmap.scaled(size)
self.setPixmap(pixmap)
class OverviewWindow(QtGui.QMainWindow):
def __init__(self, imageViewer):
super(OverviewWindow, self).__init__()
self.imageViewer = imageViewer
self.numcolumns = 4
self.scaleFactor = self.imageViewer.scaleFactor / 5.0
self.setWindowTitle("Overview")
self.mainLayout = QtGui.QGridLayout()
self._populate()
centralWidget = QtGui.QWidget()
centralWidget.setLayout(self.mainLayout)
scrollArea = QtGui.QScrollArea()
scrollArea.setWidget(centralWidget)
scrollArea.setAlignment(QtCore.Qt.AlignCenter)
self.setCentralWidget(scrollArea)
def _populate(self):
"""Populate the mainLayout with thumbnail images.
"""
# FIXME: by now, implement only the initial setup of the image
# list. Must also support updating the list after changes in
# imageViewer.selection.
images = self.imageViewer.selection
ncol = self.numcolumns
c = 0
for i in images:
thumb = ThumbnailWidget(i, self.scaleFactor)
self.mainLayout.addWidget(thumb, c // ncol, c % ncol)
c += 1
<commit_msg>Use a fixed thumbnail size rather then a relative scale.<commit_after>"""An overview window showing thumbnails of the image set.
"""
from __future__ import division
import math
from PySide import QtCore, QtGui
class ThumbnailWidget(QtGui.QLabel):
ThumbnailSize = QtCore.QSize(128, 128)
def __init__(self, image):
super(ThumbnailWidget, self).__init__()
pixmap = image.getPixmap()
pixmap = pixmap.scaled(self.ThumbnailSize, QtCore.Qt.KeepAspectRatio)
self.setPixmap(pixmap)
class OverviewWindow(QtGui.QMainWindow):
def __init__(self, imageViewer):
super(OverviewWindow, self).__init__()
self.imageViewer = imageViewer
self.numcolumns = 4
self.setWindowTitle("Overview")
self.mainLayout = QtGui.QGridLayout()
self._populate()
centralWidget = QtGui.QWidget()
centralWidget.setLayout(self.mainLayout)
scrollArea = QtGui.QScrollArea()
scrollArea.setWidget(centralWidget)
scrollArea.setAlignment(QtCore.Qt.AlignCenter)
self.setCentralWidget(scrollArea)
def _populate(self):
"""Populate the mainLayout with thumbnail images.
"""
# FIXME: by now, implement only the initial setup of the image
# list. Must also support updating the list after changes in
# imageViewer.selection.
images = self.imageViewer.selection
ncol = self.numcolumns
c = 0
for i in images:
thumb = ThumbnailWidget(i)
self.mainLayout.addWidget(thumb, c // ncol, c % ncol)
c += 1
|
44faefd4bd0bfa3dede8686903759a033c1072d6 | flask_simple_serializer/response.py | flask_simple_serializer/response.py | import json
from flask import Response as SimpleResponse
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
    """An HTTP response whose body is the JSON serialization of ``data``.

    The content type is always ``application/json`` for now; it could be
    relaxed later to support a web-browsable API.
    """

    def __init__(self, data, headers=None, status_code=HTTP_200_OK):
        """Serialize ``data`` to JSON and build the response.

        ``data`` must be already-serialized data (a serializer's ``.data``
        or ``.errors``); passing a serializer instance is a programming
        error and raises ``AssertionError``.
        """
        if isinstance(data, BaseSerializer):
            msg = (
                'You passed a Serializer instance as data, but '
                'probably meant to pass serialized `.data` or '
                '`.errors`. representation.'
            )
            raise AssertionError(msg)
        data = json.dumps(data)
        content_type = "application/json"
        # Bug fix: forward the caller-supplied headers instead of
        # hard-coding ``headers=None``, which silently dropped them.
        super(Response, self).__init__(
            data, headers=headers, content_type=content_type, status=status_code
        )
| from flask import Response as SimpleResponse
from flask import json
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
    """An HTTP response whose body is the JSON serialization of ``data``.

    The content type is always ``application/json`` for now; it could be
    relaxed later to support a web-browsable API.
    """

    def __init__(self, data, headers=None, status_code=HTTP_200_OK):
        """Serialize ``data`` to JSON and build the response.

        ``data`` must be already-serialized data (a serializer's ``.data``
        or ``.errors``); passing a serializer instance is a programming
        error and raises ``AssertionError``.
        """
        if isinstance(data, BaseSerializer):
            msg = (
                'You passed a Serializer instance as data, but '
                'probably meant to pass serialized `.data` or '
                '`.errors`. representation.'
            )
            raise AssertionError(msg)
        data = json.dumps(data)
        content_type = "application/json"
        # Bug fix: forward the caller-supplied headers instead of
        # hard-coding ``headers=None``, which silently dropped them.
        super(Response, self).__init__(
            data, headers=headers, content_type=content_type, status=status_code
        )
| Replace json with flask.json to manage the Response | Replace json with flask.json to manage the Response
| Python | mit | marcosschroh/Flask-Simple-Serializer | import json
from flask import Response as SimpleResponse
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
    """An HTTP response whose body is the JSON serialization of ``data``.

    The content type is always ``application/json`` for now; it could be
    relaxed later to support a web-browsable API.
    """

    def __init__(self, data, headers=None, status_code=HTTP_200_OK):
        """Serialize ``data`` to JSON and build the response.

        ``data`` must be already-serialized data (a serializer's ``.data``
        or ``.errors``); passing a serializer instance is a programming
        error and raises ``AssertionError``.
        """
        if isinstance(data, BaseSerializer):
            msg = (
                'You passed a Serializer instance as data, but '
                'probably meant to pass serialized `.data` or '
                '`.errors`. representation.'
            )
            raise AssertionError(msg)
        data = json.dumps(data)
        content_type = "application/json"
        # Bug fix: forward the caller-supplied headers instead of
        # hard-coding ``headers=None``, which silently dropped them.
        super(Response, self).__init__(
            data, headers=headers, content_type=content_type, status=status_code
        )
Replace json for flask.json to manage the Response | from flask import Response as SimpleResponse
from flask import json
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
| <commit_before>import json
from flask import Response as SimpleResponse
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
<commit_msg>Replace json for flask.json to manage the Response<commit_after> | from flask import Response as SimpleResponse
from flask import json
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
| import json
from flask import Response as SimpleResponse
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
Replace json for flask.json to manage the Responsefrom flask import Response as SimpleResponse
from flask import json
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
| <commit_before>import json
from flask import Response as SimpleResponse
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
<commit_msg>Replace json for flask.json to manage the Response<commit_after>from flask import Response as SimpleResponse
from flask import json
from .status_codes import HTTP_200_OK
from .serializers import BaseSerializer
class Response(SimpleResponse):
def __init__(self, data, headers=None, status_code=HTTP_200_OK):
"""
For now the content/type always will be application/json.
We can change it to make a Web Browseable API
"""
if isinstance(data, BaseSerializer):
msg = (
'You passed a Serializer instance as data, but '
'probably meant to pass serialized `.data` or '
'`.errors`. representation.'
)
raise AssertionError(msg)
data = json.dumps(data)
content_type = "application/json"
super(Response, self).__init__(
data, headers=None, content_type=content_type, status=status_code
)
|
c51ec70a8e71f2e8e7a0d0bb4f0712b379af0505 | src/victims_web/plugin/__init__.py | src/victims_web/plugin/__init__.py | # This file is part of victims-web.
#
# Copyright (C) 2013 The Victims Project
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from victims_web.models import Plugin
def get_config(plugin):
"""
Helper to get a plugin's configuration
"""
config = Plugin.objects(plugin=plugin).first()
if config is None:
config = Plugin()
config.plugin = plugin
config.save()
return config
| Add helper method to plugin pkg to handle configs | Add helper method to plugin pkg to handle configs
| Python | agpl-3.0 | victims/victims-web,jasinner/victims-web,victims/victims-web,jasinner/victims-web | Add helper method to plugin pkg to handle configs | # This file is part of victims-web.
#
# Copyright (C) 2013 The Victims Project
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from victims_web.models import Plugin
def get_config(plugin):
"""
Helper to get a plugin's configuration
"""
config = Plugin.objects(plugin=plugin).first()
if config is None:
config = Plugin()
config.plugin = plugin
config.save()
return config
| <commit_before><commit_msg>Add helper method to plugin pkg to handle configs<commit_after> | # This file is part of victims-web.
#
# Copyright (C) 2013 The Victims Project
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from victims_web.models import Plugin
def get_config(plugin):
"""
Helper to get a plugin's configuration
"""
config = Plugin.objects(plugin=plugin).first()
if config is None:
config = Plugin()
config.plugin = plugin
config.save()
return config
| Add helper method to plugin pkg to handle configs# This file is part of victims-web.
#
# Copyright (C) 2013 The Victims Project
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from victims_web.models import Plugin
def get_config(plugin):
"""
Helper to get a plugin's configuration
"""
config = Plugin.objects(plugin=plugin).first()
if config is None:
config = Plugin()
config.plugin = plugin
config.save()
return config
| <commit_before><commit_msg>Add helper method to plugin pkg to handle configs<commit_after># This file is part of victims-web.
#
# Copyright (C) 2013 The Victims Project
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from victims_web.models import Plugin
def get_config(plugin):
"""
Helper to get a plugin's configuration
"""
config = Plugin.objects(plugin=plugin).first()
if config is None:
config = Plugin()
config.plugin = plugin
config.save()
return config
| |
3f17f454172d15e9279e00ccc2acfb931bf685f1 | transmutagen/tests/test_origen.py | transmutagen/tests/test_origen.py | import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()[0]
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclides = data['table_4']['nuclides']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
assert set.intersection(*comb) == set()
| import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclide = data['table_4']['nuclide']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
a, b = comb
for common in set.intersection(set(nuclide[a]), set(nuclide[b])):
array_a, array_b = nuclide[a][common], nuclide[b][common]
assert np.allclose(array_a, 0) \
or np.allclose(array_b, 0)
# or np.allclose(array_a, array_b)
| Add a sanity test for the data | Add a sanity test for the data
| Python | bsd-3-clause | ergs/transmutagen,ergs/transmutagen | import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()[0]
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclides = data['table_4']['nuclides']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
assert set.intersection(*comb) == set()
Add a sanity test for the data | import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclide = data['table_4']['nuclide']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
a, b = comb
for common in set.intersection(set(nuclide[a]), set(nuclide[b])):
array_a, array_b = nuclide[a][common], nuclide[b][common]
assert np.allclose(array_a, 0) \
or np.allclose(array_b, 0)
# or np.allclose(array_a, array_b)
| <commit_before>import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()[0]
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclides = data['table_4']['nuclides']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
assert set.intersection(*comb) == set()
<commit_msg>Add a sanity test for the data<commit_after> | import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclide = data['table_4']['nuclide']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
a, b = comb
for common in set.intersection(set(nuclide[a]), set(nuclide[b])):
array_a, array_b = nuclide[a][common], nuclide[b][common]
assert np.allclose(array_a, 0) \
or np.allclose(array_b, 0)
# or np.allclose(array_a, array_b)
| import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()[0]
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclides = data['table_4']['nuclides']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
assert set.intersection(*comb) == set()
Add a sanity test for the dataimport os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclide = data['table_4']['nuclide']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
a, b = comb
for common in set.intersection(set(nuclide[a]), set(nuclide[b])):
array_a, array_b = nuclide[a][common], nuclide[b][common]
assert np.allclose(array_a, 0) \
or np.allclose(array_b, 0)
# or np.allclose(array_a, array_b)
| <commit_before>import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()[0]
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclides = data['table_4']['nuclides']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
assert set.intersection(*comb) == set()
<commit_msg>Add a sanity test for the data<commit_after>import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclide = data['table_4']['nuclide']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
a, b = comb
for common in set.intersection(set(nuclide[a]), set(nuclide[b])):
array_a, array_b = nuclide[a][common], nuclide[b][common]
assert np.allclose(array_a, 0) \
or np.allclose(array_b, 0)
# or np.allclose(array_a, array_b)
|
905690beacad9731bb113bdbeedf0ed2c7df3160 | profile_audfprint_match.py | profile_audfprint_match.py | import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
| import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
| Update profile for local data. | Update profile for local data.
| Python | mit | dpwe/audfprint | import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
Update profile for local data. | import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
| <commit_before>import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
<commit_msg>Update profile for local data.<commit_after> | import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
| import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
Update profile for local data.import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
| <commit_before>import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
<commit_msg>Update profile for local data.<commit_after>import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
|
c3a06fc8a1c8b1bb2b24b929fd158ae1602836f6 | cobra/topology/__init__.py | cobra/topology/__init__.py | from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from reporter_metabolites import *
del __name, __modules
| from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from .reporter_metabolites import *
del __name, __modules
| Fix import issue in cobra.topology | Fix import issue in cobra.topology
Signed-off-by: Vivek Rai <6965dcfed9719c822a5fc29f0dbf450e6c3f778e@gmail.com>
| Python | lgpl-2.1 | JuBra/cobrapy,aebrahim/cobrapy,zakandrewking/cobrapy,JuBra/cobrapy,jeicher/cobrapy,zakandrewking/cobrapy,aebrahim/cobrapy,jeicher/cobrapy | from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from reporter_metabolites import *
del __name, __modules
Fix import issue in cobra.topology
Signed-off-by: Vivek Rai <6965dcfed9719c822a5fc29f0dbf450e6c3f778e@gmail.com> | from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from .reporter_metabolites import *
del __name, __modules
| <commit_before>from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from reporter_metabolites import *
del __name, __modules
<commit_msg>Fix import issue in cobra.topology
Signed-off-by: Vivek Rai <6965dcfed9719c822a5fc29f0dbf450e6c3f778e@gmail.com><commit_after> | from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from .reporter_metabolites import *
del __name, __modules
| from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from reporter_metabolites import *
del __name, __modules
Fix import issue in cobra.topology
Signed-off-by: Vivek Rai <6965dcfed9719c822a5fc29f0dbf450e6c3f778e@gmail.com>from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from .reporter_metabolites import *
del __name, __modules
| <commit_before>from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from reporter_metabolites import *
del __name, __modules
<commit_msg>Fix import issue in cobra.topology
Signed-off-by: Vivek Rai <6965dcfed9719c822a5fc29f0dbf450e6c3f778e@gmail.com><commit_after>from os import name as __name
from sys import modules as __modules
from warnings import warn
if __name == 'java':
warn("%s is not yet supported on jython"%__modules[__name__])
else:
from .reporter_metabolites import *
del __name, __modules
|
e7998648c42d5bcccec7239d13521a5b77a738af | src/utils/indices.py | src/utils/indices.py | import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
| import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index with dynamic template.
Run it on an open index to update dynamic mapping.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
| Allow setup function to update dynamic mapping | Allow setup function to update dynamic mapping
| Python | mit | Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI,Network-of-BioThings/smartAPI | import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
Allow setup function to update dynamic mapping | import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index with dynamic template.
Run it on an open index to update dynamic mapping.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
| <commit_before>import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
<commit_msg>Allow setup function to update dynamic mapping<commit_after> | import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index with dynamic template.
Run it on an open index to update dynamic mapping.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
| import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
Allow setup function to update dynamic mappingimport json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index with dynamic template.
Run it on an open index to update dynamic mapping.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
| <commit_before>import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index.
Primary index with dynamic template.
Secondary index with static mappings.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
<commit_msg>Allow setup function to update dynamic mapping<commit_after>import json
import os
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Index
from model import APIDoc
def exists():
return Index(APIDoc.Index.name).exists()
def setup():
"""
Setup Elasticsearch Index with dynamic template.
Run it on an open index to update dynamic mapping.
"""
_dirname = os.path.dirname(__file__)
with open(os.path.join(_dirname, 'mapping.json'), 'r') as file:
mapping = json.load(file)
if not exists():
APIDoc.init()
elastic = Elasticsearch()
elastic.indices.put_mapping(
index=APIDoc.Index.name,
body=mapping
)
def delete():
Index(APIDoc.Index.name).delete()
def reset():
if exists():
delete()
setup()
def refresh():
index = Index(APIDoc.Index.name)
index.refresh()
|
3a8ff4ce62c2a0f3e7ebc61284894fc69ec36b79 | django_sqs/message.py | django_sqs/message.py | import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
app_label, model, pk = json.loads(base64.b64decode(value))
ct = ContentType.objects.get(app_label=app_label, model=model)
return ct.get_object_for_this_type(pk=pk)
def get_instance(self):
return self.get_body()
| import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
try:
app_label, model, pk = json.loads(base64.b64decode(value))
except Exception, e:
self.__reason = "Error decoding payload: %s" % e
return None
try:
ct = ContentType.objects.get(app_label=app_label, model=model)
except ContentType.DoesNotExist:
self.__reason = "Invalid content type."
return None
cls = ct.model_class()
try:
return cls.objects.get(pk=pk)
except cls.DoesNotExist:
self.__reason = "%s.%s %r does not exist" % (
cls.__module__, cls.__name__, pk)
return None
def get_body(self):
rv = boto.sqs.message.RawMessage.get_body(self)
if rv is not None:
return rv
raise ValueError(self.__reason)
def get_instance(self):
return self.get_body()
| Raise ValueError on get_body instead of random exception when initializing ModelInstanceMessage. | Raise ValueError on get_body instead of random exception when initializing ModelInstanceMessage.
| Python | bsd-3-clause | mpasternacki/django-sqs | import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
app_label, model, pk = json.loads(base64.b64decode(value))
ct = ContentType.objects.get(app_label=app_label, model=model)
return ct.get_object_for_this_type(pk=pk)
def get_instance(self):
return self.get_body()
Raise ValueError on get_body instead of random exception when initializing ModelInstanceMessage. | import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
try:
app_label, model, pk = json.loads(base64.b64decode(value))
except Exception, e:
self.__reason = "Error decoding payload: %s" % e
return None
try:
ct = ContentType.objects.get(app_label=app_label, model=model)
except ContentType.DoesNotExist:
self.__reason = "Invalid content type."
return None
cls = ct.model_class()
try:
return cls.objects.get(pk=pk)
except cls.DoesNotExist:
self.__reason = "%s.%s %r does not exist" % (
cls.__module__, cls.__name__, pk)
return None
def get_body(self):
rv = boto.sqs.message.RawMessage.get_body(self)
if rv is not None:
return rv
raise ValueError(self.__reason)
def get_instance(self):
return self.get_body()
| <commit_before>import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
app_label, model, pk = json.loads(base64.b64decode(value))
ct = ContentType.objects.get(app_label=app_label, model=model)
return ct.get_object_for_this_type(pk=pk)
def get_instance(self):
return self.get_body()
<commit_msg>Raise ValueError on get_body instead of random exception when initializing ModelInstanceMessage.<commit_after> | import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
try:
app_label, model, pk = json.loads(base64.b64decode(value))
except Exception, e:
self.__reason = "Error decoding payload: %s" % e
return None
try:
ct = ContentType.objects.get(app_label=app_label, model=model)
except ContentType.DoesNotExist:
self.__reason = "Invalid content type."
return None
cls = ct.model_class()
try:
return cls.objects.get(pk=pk)
except cls.DoesNotExist:
self.__reason = "%s.%s %r does not exist" % (
cls.__module__, cls.__name__, pk)
return None
def get_body(self):
rv = boto.sqs.message.RawMessage.get_body(self)
if rv is not None:
return rv
raise ValueError(self.__reason)
def get_instance(self):
return self.get_body()
| import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
app_label, model, pk = json.loads(base64.b64decode(value))
ct = ContentType.objects.get(app_label=app_label, model=model)
return ct.get_object_for_this_type(pk=pk)
def get_instance(self):
return self.get_body()
Raise ValueError on get_body instead of random exception when initializing ModelInstanceMessage.import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
try:
app_label, model, pk = json.loads(base64.b64decode(value))
except Exception, e:
self.__reason = "Error decoding payload: %s" % e
return None
try:
ct = ContentType.objects.get(app_label=app_label, model=model)
except ContentType.DoesNotExist:
self.__reason = "Invalid content type."
return None
cls = ct.model_class()
try:
return cls.objects.get(pk=pk)
except cls.DoesNotExist:
self.__reason = "%s.%s %r does not exist" % (
cls.__module__, cls.__name__, pk)
return None
def get_body(self):
rv = boto.sqs.message.RawMessage.get_body(self)
if rv is not None:
return rv
raise ValueError(self.__reason)
def get_instance(self):
return self.get_body()
| <commit_before>import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
app_label, model, pk = json.loads(base64.b64decode(value))
ct = ContentType.objects.get(app_label=app_label, model=model)
return ct.get_object_for_this_type(pk=pk)
def get_instance(self):
return self.get_body()
<commit_msg>Raise ValueError on get_body instead of random exception when initializing ModelInstanceMessage.<commit_after>import base64
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
import django.utils.simplejson as json
import boto.sqs.message
from django.contrib.contenttypes.models import ContentType
class ModelInstanceMessage(boto.sqs.message.RawMessage):
"""SQS Message class that returns
"""
def __init__(self, queue=None, instance=None):
boto.sqs.message.RawMessage.__init__(
self, queue=queue, body=instance)
def encode(self, value):
ct = ContentType.objects.get_for_model(value)
return base64.b64encode(
json.dumps(
(ct.app_label, ct.model, value.pk)))
def decode(self, value):
try:
app_label, model, pk = json.loads(base64.b64decode(value))
except Exception, e:
self.__reason = "Error decoding payload: %s" % e
return None
try:
ct = ContentType.objects.get(app_label=app_label, model=model)
except ContentType.DoesNotExist:
self.__reason = "Invalid content type."
return None
cls = ct.model_class()
try:
return cls.objects.get(pk=pk)
except cls.DoesNotExist:
self.__reason = "%s.%s %r does not exist" % (
cls.__module__, cls.__name__, pk)
return None
def get_body(self):
rv = boto.sqs.message.RawMessage.get_body(self)
if rv is not None:
return rv
raise ValueError(self.__reason)
def get_instance(self):
return self.get_body()
|
9b0618d3b52c74bf2abd65a581807087cbaa2ca4 | grammpy_transforms/NongeneratingSymbolsRemove/nongeneratingSymbolsRemove.py | grammpy_transforms/NongeneratingSymbolsRemove/nongeneratingSymbolsRemove.py | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from grammpy import Grammar
def _copy_grammar(grammar):
return Grammar(terminals=(item.s for item in grammar.terms()),
nonterminals=grammar.nonterms(),
rules=grammar.rules(),
start_symbol=grammar.start_get())
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
| #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from copy import copy
from grammpy import Grammar
def _copy_grammar(grammar):
return copy(grammar)
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
| Switch to new version of grammpy (1.1.2) and use copy method | Switch to new version of grammpy (1.1.2) and use copy method
| Python | mit | PatrikValkovic/grammpy | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from grammpy import Grammar
def _copy_grammar(grammar):
return Grammar(terminals=(item.s for item in grammar.terms()),
nonterminals=grammar.nonterms(),
rules=grammar.rules(),
start_symbol=grammar.start_get())
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
Switch to new version of grammpy (1.1.2) and use copy method | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from copy import copy
from grammpy import Grammar
def _copy_grammar(grammar):
return copy(grammar)
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
| <commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from grammpy import Grammar
def _copy_grammar(grammar):
return Grammar(terminals=(item.s for item in grammar.terms()),
nonterminals=grammar.nonterms(),
rules=grammar.rules(),
start_symbol=grammar.start_get())
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
<commit_msg>Switch to new version of grammpy (1.1.2) and use copy method<commit_after> | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from copy import copy
from grammpy import Grammar
def _copy_grammar(grammar):
return copy(grammar)
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
| #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from grammpy import Grammar
def _copy_grammar(grammar):
return Grammar(terminals=(item.s for item in grammar.terms()),
nonterminals=grammar.nonterms(),
rules=grammar.rules(),
start_symbol=grammar.start_get())
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
Switch to new version of grammpy (1.1.2) and use copy method#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from copy import copy
from grammpy import Grammar
def _copy_grammar(grammar):
return copy(grammar)
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
| <commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from grammpy import Grammar
def _copy_grammar(grammar):
return Grammar(terminals=(item.s for item in grammar.terms()),
nonterminals=grammar.nonterms(),
rules=grammar.rules(),
start_symbol=grammar.start_get())
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
<commit_msg>Switch to new version of grammpy (1.1.2) and use copy method<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from copy import copy
from grammpy import Grammar
def _copy_grammar(grammar):
return copy(grammar)
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
|
50dce60963c6817eb0dded8c4fc23047e7b54d6e | runtests.py | runtests.py | #!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
# find all of the test modules
modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
print "Running the tests found in the following modules:"
print modules
# load all of the tests into a suite
try:
return unittest.TestLoader().loadTestsFromNames(modules)
except Exception, exception:
# attempt to produce a more specific message
for module in modules:
__import__(module)
raise
unit_tests = build_suite('tests')
functional_tests = build_suite('functional_tests')
# run test suites
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
| #!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
# find all of the test modules
modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
print "Running the tests found in the following modules:"
print modules
# load all of the tests into a suite
try:
return unittest.TestLoader().loadTestsFromNames(modules)
except Exception, exception:
# attempt to produce a more specific message
for module in modules:
__import__(module)
raise
# build and run unit test suite
unit_tests = build_suite('tests')
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
cleanup()
# build and run functional test suite
functional_tests = build_suite('functional_tests')
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
| Split out running unit and functional tests | Split out running unit and functional tests
| Python | apache-2.0 | google/oauth2client,clancychilds/oauth2client,google/oauth2client,clancychilds/oauth2client,googleapis/google-api-python-client,googleapis/oauth2client,googleapis/oauth2client,jonparrott/oauth2client,jonparrott/oauth2client,googleapis/google-api-python-client | #!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
# find all of the test modules
modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
print "Running the tests found in the following modules:"
print modules
# load all of the tests into a suite
try:
return unittest.TestLoader().loadTestsFromNames(modules)
except Exception, exception:
# attempt to produce a more specific message
for module in modules:
__import__(module)
raise
unit_tests = build_suite('tests')
functional_tests = build_suite('functional_tests')
# run test suites
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
Split out running unit and functional tests | #!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
# find all of the test modules
modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
print "Running the tests found in the following modules:"
print modules
# load all of the tests into a suite
try:
return unittest.TestLoader().loadTestsFromNames(modules)
except Exception, exception:
# attempt to produce a more specific message
for module in modules:
__import__(module)
raise
# build and run unit test suite
unit_tests = build_suite('tests')
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
cleanup()
# build and run functional test suite
functional_tests = build_suite('functional_tests')
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
| <commit_before>#!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
# find all of the test modules
modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
print "Running the tests found in the following modules:"
print modules
# load all of the tests into a suite
try:
return unittest.TestLoader().loadTestsFromNames(modules)
except Exception, exception:
# attempt to produce a more specific message
for module in modules:
__import__(module)
raise
unit_tests = build_suite('tests')
functional_tests = build_suite('functional_tests')
# run test suites
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
<commit_msg>Split out running unit and functional tests<commit_after> | #!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
# find all of the test modules
modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
print "Running the tests found in the following modules:"
print modules
# load all of the tests into a suite
try:
return unittest.TestLoader().loadTestsFromNames(modules)
except Exception, exception:
# attempt to produce a more specific message
for module in modules:
__import__(module)
raise
# build and run unit test suite
unit_tests = build_suite('tests')
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
cleanup()
# build and run functional test suite
functional_tests = build_suite('functional_tests')
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
| #!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
# find all of the test modules
modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
print "Running the tests found in the following modules:"
print modules
# load all of the tests into a suite
try:
return unittest.TestLoader().loadTestsFromNames(modules)
except Exception, exception:
# attempt to produce a more specific message
for module in modules:
__import__(module)
raise
unit_tests = build_suite('tests')
functional_tests = build_suite('functional_tests')
# run test suites
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
Split out running unit and functional tests#!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
# find all of the test modules
modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
print "Running the tests found in the following modules:"
print modules
# load all of the tests into a suite
try:
return unittest.TestLoader().loadTestsFromNames(modules)
except Exception, exception:
# attempt to produce a more specific message
for module in modules:
__import__(module)
raise
# build and run unit test suite
unit_tests = build_suite('tests')
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
cleanup()
# build and run functional test suite
functional_tests = build_suite('functional_tests')
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
| <commit_before>#!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
# find all of the test modules
modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
print "Running the tests found in the following modules:"
print modules
# load all of the tests into a suite
try:
return unittest.TestLoader().loadTestsFromNames(modules)
except Exception, exception:
# attempt to produce a more specific message
for module in modules:
__import__(module)
raise
unit_tests = build_suite('tests')
functional_tests = build_suite('functional_tests')
# run test suites
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
<commit_msg>Split out running unit and functional tests<commit_after>#!/usr/bin/env python
import glob
import logging
import os
import sys
import unittest
from trace import fullmodname
try:
from tests.utils import cleanup
except:
def cleanup():
pass
sys.path.insert(0, os.getcwd())
verbosity = 1
if "-q" in sys.argv or '--quiet' in sys.argv:
verbosity = 0
if "-v" in sys.argv or '--verbose' in sys.argv:
verbosity = 2
if verbosity == 0:
logging.disable(logging.CRITICAL)
elif verbosity == 1:
logging.disable(logging.ERROR)
elif verbosity == 2:
logging.basicConfig(level=logging.DEBUG)
def build_suite(folder):
    """Collect every test_*.py module under *folder* into a unittest suite.

    Python 2 code (print statements, ``except Exception, exception``).
    Returns the suite on success; on a load failure, re-imports each module
    directly so the original, more specific ImportError surfaces.
    """
    # find all of the test modules
    modules = map(fullmodname, glob.glob(os.path.join(folder, 'test_*.py')))
    print "Running the tests found in the following modules:"
    print modules
    # load all of the tests into a suite
    try:
        return unittest.TestLoader().loadTestsFromNames(modules)
    except Exception, exception:
        # attempt to produce a more specific message
        # loadTestsFromNames hides which module failed to import; importing
        # each module ourselves re-raises the underlying error directly.
        for module in modules:
            __import__(module)
        raise
# build and run unit test suite
unit_tests = build_suite('tests')
unittest.TextTestRunner(verbosity=verbosity).run(unit_tests)
cleanup()
# build and run functional test suite
functional_tests = build_suite('functional_tests')
unittest.TextTestRunner(verbosity=verbosity).run(functional_tests)
cleanup()
|
0f21ef4fe5a1e95668f5fdbeda4d8a37da65484f | trombi/__init__.py | trombi/__init__.py | # Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
| # Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
version = (0, 9, 0)
from .client import *
| Add version information under trombi module | Add version information under trombi module
| Python | mit | inoi/trombi | # Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
Add version information under trombi module | # Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
version = (0, 9, 0)
from .client import *
| <commit_before># Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
<commit_msg>Add version information under trombi module<commit_after> | # Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
version = (0, 9, 0)
from .client import *
| # Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
Add version information under trombi module# Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
version = (0, 9, 0)
from .client import *
| <commit_before># Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .client import *
<commit_msg>Add version information under trombi module<commit_after># Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
version = (0, 9, 0)
from .client import *
|
4c11a3c8f0cd82ebee3269e76450562aa8d2b8c3 | troposphere/sns.py | troposphere/sns.py | # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
    """Inline SNS subscription property, used in Topic's 'Subscription' list."""
    # Each props value is (expected type, required-flag) -- presumably the
    # troposphere validation convention; confirm against AWSProperty.
    props = {
        'Endpoint': (basestring, True),
        'Protocol': (basestring, True),
    }
class SubscriptionResource(AWSObject):
    """Standalone AWS::SNS::Subscription CloudFormation resource.

    Named *Resource so it does not clash with the inline ``Subscription``
    property class defined above.
    """
    resource_type = "AWS::SNS::Subscription"
    props = {
        'Endpoint': (basestring, True),
        'Protocol': (basestring, True),
        'TopicArn': (basestring, True),
        'FilterPolicy': (dict, False),
    }
class TopicPolicy(AWSObject):
    """AWS::SNS::TopicPolicy resource: attaches an access policy to topics."""
    resource_type = "AWS::SNS::TopicPolicy"
    props = {
        # policytypes is (dict, Policy) when awacs is importable, else (dict,)
        'PolicyDocument': (policytypes, True),
        'Topics': (list, True),
    }
class Topic(AWSObject):
    """AWS::SNS::Topic resource."""
    resource_type = "AWS::SNS::Topic"
    props = {
        'DisplayName': (basestring, False),
        # list of inline Subscription property objects
        'Subscription': ([Subscription], False),
        'TopicName': (basestring, False),
    }
| # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
    """Standalone AWS::SNS::Subscription CloudFormation resource.

    Named *Resource so it does not clash with the inline ``Subscription``
    property class used by ``Topic``.
    """
    resource_type = "AWS::SNS::Subscription"
    props = {
        'DeliveryPolicy': (dict, False),
        'Endpoint': (basestring, False),
        'FilterPolicy': (dict, False),
        'Protocol': (basestring, True),
        # Fix: `boolean` is undefined in this module (nothing imports it),
        # so creating this class raised NameError at import time.  The
        # built-in `bool` keeps the (type, required) contract; upstream
        # troposphere uses the `boolean` validator from
        # troposphere.validators, which also accepts "true"/"false"/0/1 --
        # switch to that if the validators import is added.
        'RawMessageDelivery': (bool, False),
        'Region': (basestring, False),
        'TopicArn': (basestring, True),
    }
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
| Add missing properties to SNS::Subscription | Add missing properties to SNS::Subscription
| Python | bsd-2-clause | johnctitus/troposphere,ikben/troposphere,cloudtools/troposphere,johnctitus/troposphere,ikben/troposphere,cloudtools/troposphere,pas256/troposphere,pas256/troposphere | # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
'TopicArn': (basestring, True),
'FilterPolicy': (dict, False),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
Add missing properties to SNS::Subscription | # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
    """Standalone AWS::SNS::Subscription CloudFormation resource.

    Named *Resource so it does not clash with the inline ``Subscription``
    property class used by ``Topic``.
    """
    resource_type = "AWS::SNS::Subscription"
    props = {
        'DeliveryPolicy': (dict, False),
        'Endpoint': (basestring, False),
        'FilterPolicy': (dict, False),
        'Protocol': (basestring, True),
        # Fix: `boolean` is undefined in this module (nothing imports it),
        # so creating this class raised NameError at import time.  The
        # built-in `bool` keeps the (type, required) contract; upstream
        # troposphere uses the `boolean` validator from
        # troposphere.validators, which also accepts "true"/"false"/0/1 --
        # switch to that if the validators import is added.
        'RawMessageDelivery': (bool, False),
        'Region': (basestring, False),
        'TopicArn': (basestring, True),
    }
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
| <commit_before># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
'TopicArn': (basestring, True),
'FilterPolicy': (dict, False),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
<commit_msg>Add missing properties to SNS::Subscription<commit_after> | # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
    """Inline SNS subscription property (endpoint + protocol) for a Topic."""
    props = dict(
        Endpoint=(basestring, True),
        Protocol=(basestring, True),
    )
class SubscriptionResource(AWSObject):
    """Standalone AWS::SNS::Subscription CloudFormation resource.

    Named *Resource so it does not clash with the inline ``Subscription``
    property class used by ``Topic``.
    """
    resource_type = "AWS::SNS::Subscription"
    props = {
        'DeliveryPolicy': (dict, False),
        'Endpoint': (basestring, False),
        'FilterPolicy': (dict, False),
        'Protocol': (basestring, True),
        # Fix: `boolean` is undefined in this module (nothing imports it),
        # so creating this class raised NameError at import time.  The
        # built-in `bool` keeps the (type, required) contract; upstream
        # troposphere uses the `boolean` validator from
        # troposphere.validators, which also accepts "true"/"false"/0/1 --
        # switch to that if the validators import is added.
        'RawMessageDelivery': (bool, False),
        'Region': (basestring, False),
        'TopicArn': (basestring, True),
    }
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
| # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
'TopicArn': (basestring, True),
'FilterPolicy': (dict, False),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
Add missing properties to SNS::Subscription# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
    """Standalone AWS::SNS::Subscription CloudFormation resource.

    Named *Resource so it does not clash with the inline ``Subscription``
    property class used by ``Topic``.
    """
    resource_type = "AWS::SNS::Subscription"
    props = {
        'DeliveryPolicy': (dict, False),
        'Endpoint': (basestring, False),
        'FilterPolicy': (dict, False),
        'Protocol': (basestring, True),
        # Fix: `boolean` is undefined in this module (nothing imports it),
        # so creating this class raised NameError at import time.  The
        # built-in `bool` keeps the (type, required) contract; upstream
        # troposphere uses the `boolean` validator from
        # troposphere.validators, which also accepts "true"/"false"/0/1 --
        # switch to that if the validators import is added.
        'RawMessageDelivery': (bool, False),
        'Region': (basestring, False),
        'TopicArn': (basestring, True),
    }
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
| <commit_before># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
'TopicArn': (basestring, True),
'FilterPolicy': (dict, False),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
<commit_msg>Add missing properties to SNS::Subscription<commit_after># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
    """Standalone AWS::SNS::Subscription CloudFormation resource.

    Named *Resource so it does not clash with the inline ``Subscription``
    property class used by ``Topic``.
    """
    resource_type = "AWS::SNS::Subscription"
    props = {
        'DeliveryPolicy': (dict, False),
        'Endpoint': (basestring, False),
        'FilterPolicy': (dict, False),
        'Protocol': (basestring, True),
        # Fix: `boolean` is undefined in this module (nothing imports it),
        # so creating this class raised NameError at import time.  The
        # built-in `bool` keeps the (type, required) contract; upstream
        # troposphere uses the `boolean` validator from
        # troposphere.validators, which also accepts "true"/"false"/0/1 --
        # switch to that if the validators import is added.
        'RawMessageDelivery': (bool, False),
        'Region': (basestring, False),
        'TopicArn': (basestring, True),
    }
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
|
6038bcd507c43eb86e04c6a32abf9b8249c8872e | tests/server/handlers/test_zip.py | tests/server/handlers/test_zip.py | import asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
def setUp(self):
super().setUp()
identity_future = asyncio.Future()
identity_future.set_result({
'auth': {},
'credentials': {},
'settings': {},
})
self.mock_identity = mock.Mock()
self.mock_identity.return_value = identity_future
self.identity_patcher = mock.patch('waterbutler.server.handlers.core.get_identity', self.mock_identity)
self.identity_patcher.start()
def tearDown(self):
super().tearDown()
self.identity_patcher.stop()
@mock.patch('waterbutler.core.utils.make_provider')
@testing.gen_test
def test_download_stream(self, mock_make_provider):
stream = asyncio.StreamReader()
data = b'freddie brian john roger'
stream.feed_data(data)
stream.feed_eof()
stream.size = len(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(('file.txt', stream))
mock_provider = utils.mock_provider_method(mock_make_provider,
'zip',
zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == data | import asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
    """Tests for the /zip download endpoint."""
    @testing.gen_test
    def test_download_stream(self):
        """The /zip endpoint should stream back a valid zip of the provider's file."""
        data = b'freddie brian john roger'
        stream = streams.StringStream(data)
        stream.content_type = 'application/octet-stream'
        # Archive with a single entry, 'file.txt', backed by the stream above.
        zipstream = streams.ZipStreamReader(('file.txt', stream))
        self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream)
        resp = yield self.http_client.fetch(
            self.get_url('/zip?provider=queenhub&path=/freddie.png'),
        )
        # testzip() returns None when every entry's CRC checks out, i.e. the
        # streamed body is a well-formed archive.
        zip = zipfile.ZipFile(io.BytesIO(resp.body))
        assert zip.testzip() is None
        assert zip.open('file.txt').read() == data
| Remove deprecated test setup and teardown code | Remove deprecated test setup and teardown code
| Python | apache-2.0 | rdhyee/waterbutler,kwierman/waterbutler,hmoco/waterbutler,CenterForOpenScience/waterbutler,cosenal/waterbutler,Ghalko/waterbutler,rafaeldelucena/waterbutler,felliott/waterbutler,icereval/waterbutler,RCOSDP/waterbutler,TomBaxter/waterbutler,chrisseto/waterbutler,Johnetordoff/waterbutler | import asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
def setUp(self):
super().setUp()
identity_future = asyncio.Future()
identity_future.set_result({
'auth': {},
'credentials': {},
'settings': {},
})
self.mock_identity = mock.Mock()
self.mock_identity.return_value = identity_future
self.identity_patcher = mock.patch('waterbutler.server.handlers.core.get_identity', self.mock_identity)
self.identity_patcher.start()
def tearDown(self):
super().tearDown()
self.identity_patcher.stop()
@mock.patch('waterbutler.core.utils.make_provider')
@testing.gen_test
def test_download_stream(self, mock_make_provider):
stream = asyncio.StreamReader()
data = b'freddie brian john roger'
stream.feed_data(data)
stream.feed_eof()
stream.size = len(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(('file.txt', stream))
mock_provider = utils.mock_provider_method(mock_make_provider,
'zip',
zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == dataRemove deprecated test setup and teardown code | import asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
@testing.gen_test
def test_download_stream(self):
data = b'freddie brian john roger'
stream = streams.StringStream(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(('file.txt', stream))
self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=/freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == data
| <commit_before>import asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
    def setUp(self):
        """Patch waterbutler's get_identity with a canned, empty identity."""
        super().setUp()
        # Pre-resolved Future: the mocked get_identity returns an awaitable
        # (presumably the handler awaits it -- confirm against the handler).
        identity_future = asyncio.Future()
        identity_future.set_result({
            'auth': {},
            'credentials': {},
            'settings': {},
        })
        self.mock_identity = mock.Mock()
        self.mock_identity.return_value = identity_future
        self.identity_patcher = mock.patch('waterbutler.server.handlers.core.get_identity', self.mock_identity)
        self.identity_patcher.start()
    def tearDown(self):
        """Undo the get_identity patch installed in setUp."""
        super().tearDown()
        self.identity_patcher.stop()
@mock.patch('waterbutler.core.utils.make_provider')
@testing.gen_test
def test_download_stream(self, mock_make_provider):
stream = asyncio.StreamReader()
data = b'freddie brian john roger'
stream.feed_data(data)
stream.feed_eof()
stream.size = len(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(('file.txt', stream))
mock_provider = utils.mock_provider_method(mock_make_provider,
'zip',
zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == data<commit_msg>Remove deprecated test setup and teardown code<commit_after> | import asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
@testing.gen_test
def test_download_stream(self):
data = b'freddie brian john roger'
stream = streams.StringStream(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(('file.txt', stream))
self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=/freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == data
| import asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
def setUp(self):
super().setUp()
identity_future = asyncio.Future()
identity_future.set_result({
'auth': {},
'credentials': {},
'settings': {},
})
self.mock_identity = mock.Mock()
self.mock_identity.return_value = identity_future
self.identity_patcher = mock.patch('waterbutler.server.handlers.core.get_identity', self.mock_identity)
self.identity_patcher.start()
def tearDown(self):
super().tearDown()
self.identity_patcher.stop()
@mock.patch('waterbutler.core.utils.make_provider')
@testing.gen_test
def test_download_stream(self, mock_make_provider):
stream = asyncio.StreamReader()
data = b'freddie brian john roger'
stream.feed_data(data)
stream.feed_eof()
stream.size = len(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(('file.txt', stream))
mock_provider = utils.mock_provider_method(mock_make_provider,
'zip',
zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == dataRemove deprecated test setup and teardown codeimport asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
@testing.gen_test
def test_download_stream(self):
data = b'freddie brian john roger'
stream = streams.StringStream(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(('file.txt', stream))
self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=/freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == data
| <commit_before>import asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
def setUp(self):
super().setUp()
identity_future = asyncio.Future()
identity_future.set_result({
'auth': {},
'credentials': {},
'settings': {},
})
self.mock_identity = mock.Mock()
self.mock_identity.return_value = identity_future
self.identity_patcher = mock.patch('waterbutler.server.handlers.core.get_identity', self.mock_identity)
self.identity_patcher.start()
def tearDown(self):
super().tearDown()
self.identity_patcher.stop()
@mock.patch('waterbutler.core.utils.make_provider')
@testing.gen_test
def test_download_stream(self, mock_make_provider):
stream = asyncio.StreamReader()
data = b'freddie brian john roger'
stream.feed_data(data)
stream.feed_eof()
stream.size = len(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(('file.txt', stream))
mock_provider = utils.mock_provider_method(mock_make_provider,
'zip',
zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == data<commit_msg>Remove deprecated test setup and teardown code<commit_after>import asyncio
import io
import zipfile
from unittest import mock
from tornado import testing
from waterbutler.core import streams
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
@testing.gen_test
def test_download_stream(self):
data = b'freddie brian john roger'
stream = streams.StringStream(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(('file.txt', stream))
self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=/freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == data
|
e9c23c7a0c622e8db29d066f1cd1a679dc6eb1bf | salt/grains/external_ip.py | salt/grains/external_ip.py | # -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain
| # -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain, this file should be removed in the Boron release
| Add note to remove file | Add note to remove file
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | # -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain
Add note to remove file | # -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain, this file should be removed in the Boron release
| <commit_before># -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain
<commit_msg>Add note to remove file<commit_after> | # -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain, this file should be removed in the Boron release
| # -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain
Add note to remove file# -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain, this file should be removed in the Boron release
| <commit_before># -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain
<commit_msg>Add note to remove file<commit_after># -*- coding: utf-8 -*-
# This file is here to ensure that upgrades of salt remove the external_ip
# grain, this file should be removed in the Boron release
|
8ecaaa14cba2a84606dc6d31f0c16c09dbfae017 | server/LikeLines/debug.py | server/LikeLines/debug.py | """
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('destroy_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
| """
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('end_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
| Fix incorrect redirect in clear_all | Fix incorrect redirect in clear_all
| Python | mit | ShinNoNoir/likelines-player,ShinNoNoir/likelines-player,ShinNoNoir/likelines-player | """
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('destroy_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
Fix incorrect redirect in clear_all | """
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('end_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
| <commit_before>"""
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('destroy_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
<commit_msg>Fix incorrect redirect in clear_all<commit_after> | """
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('end_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
| """
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('destroy_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
Fix incorrect redirect in clear_all"""
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('end_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
| <commit_before>"""
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('destroy_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
<commit_msg>Fix incorrect redirect in clear_all<commit_after>"""
Debug Blueprints.
"""
from flask import Blueprint, current_app, redirect, jsonify, url_for, request
debug_pages = Blueprint('debug', __name__)
@debug_pages.route("/clear_all", methods=['GET', 'POST'])
def clear_all():
if request.method == 'GET':
return '<form method="POST"><input type="submit" value="CLEAR DATABASE"></form>'
else:
mongo = current_app.mongo
mongo.db.userSessions.remove()
mongo.db.interactionSessions.remove()
return redirect(url_for('end_session'))
@debug_pages.route("/dump")
def dump_session():
mongo = current_app.mongo
return jsonify({
'userSessions': list(mongo.db.userSessions.find()),
'interactionSessions': list(mongo.db.interactionSessions.find()),
})
|
35a9de1ba8f6c1bcb6ae35c9f965657de973412f | tokenizers/sentiment_tokenizer.py | tokenizers/sentiment_tokenizer.py | from nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list(trigrams(negated_tokens))
return list([' '.join(s) for s in list_of_trigrams])
| from nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list([' '.join(s) for s in trigrams(negated_tokens)])
return list_of_trigrams
| Use map to loop instead of mapping a list | Use map to loop instead of mapping a list
| Python | apache-2.0 | chuajiesheng/twitter-sentiment-analysis | from nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list(trigrams(negated_tokens))
return list([' '.join(s) for s in list_of_trigrams])
Use map to loop instead of mapping a list | from nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list([' '.join(s) for s in trigrams(negated_tokens)])
return list_of_trigrams
| <commit_before>from nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list(trigrams(negated_tokens))
return list([' '.join(s) for s in list_of_trigrams])
<commit_msg>Use map to loop instead of mapping a list<commit_after> | from nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list([' '.join(s) for s in trigrams(negated_tokens)])
return list_of_trigrams
| from nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list(trigrams(negated_tokens))
return list([' '.join(s) for s in list_of_trigrams])
Use map to loop instead of mapping a listfrom nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list([' '.join(s) for s in trigrams(negated_tokens)])
return list_of_trigrams
| <commit_before>from nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list(trigrams(negated_tokens))
return list([' '.join(s) for s in list_of_trigrams])
<commit_msg>Use map to loop instead of mapping a list<commit_after>from nltk.sentiment.util import mark_negation
from nltk.util import trigrams
import re
import validators
from .happy_tokenizer import Tokenizer
class SentimentTokenizer(object):
def __init__(self):
self.tknzr = Tokenizer()
@staticmethod
def reduce_lengthening(text):
"""
Replace repeated character sequences of length 3 or greater with sequences
of length 3.
"""
pattern = re.compile(r"(.)\1{2,}")
return pattern.sub(r"\1\1\1", text)
@staticmethod
def replace_username(token):
return '@__user__' if token.startswith('@') else token
@staticmethod
def replace_link(token):
return '__url__' if validators.url(token) else token
def __call__(self, t):
t = self.reduce_lengthening(t)
tokens = t.split(' ')
cleaned_tokens = []
for token in tokens:
token = self.replace_username(token)
token = self.replace_link(token)
cleaned_tokens.append(token)
rebuild_str = ' '.join(cleaned_tokens)
negated_tokens = mark_negation(list(self.tknzr.tokenize(rebuild_str)))
list_of_trigrams = list([' '.join(s) for s in trigrams(negated_tokens)])
return list_of_trigrams
|
3c7e6e1f02b9d73497cb49359d542d3fa4c9a85f | utils/rc_sensor.py | utils/rc_sensor.py | #!/usr/bin/env python
import rcsensor
print(rcsensor.get_count(200, 10, 22))
| #!/usr/bin/env python
from common.rcsensor import rcsensor as rcsensor
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
return rcsensor.get_rc_counts(self.gpio, self.cycles, self.discharge_delay)
| Create a RC sensor class object | Create a RC sensor class object
| Python | mit | mecworks/garden_pi,mecworks/garden_pi,mecworks/garden_pi,mecworks/garden_pi | #!/usr/bin/env python
import rcsensor
print(rcsensor.get_count(200, 10, 22))
Create a RC sensor class object | #!/usr/bin/env python
from common.rcsensor import rcsensor as rcsensor
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
return rcsensor.get_rc_counts(self.gpio, self.cycles, self.discharge_delay)
| <commit_before>#!/usr/bin/env python
import rcsensor
print(rcsensor.get_count(200, 10, 22))
<commit_msg>Create a RC sensor class object<commit_after> | #!/usr/bin/env python
from common.rcsensor import rcsensor as rcsensor
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
return rcsensor.get_rc_counts(self.gpio, self.cycles, self.discharge_delay)
| #!/usr/bin/env python
import rcsensor
print(rcsensor.get_count(200, 10, 22))
Create a RC sensor class object#!/usr/bin/env python
from common.rcsensor import rcsensor as rcsensor
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
return rcsensor.get_rc_counts(self.gpio, self.cycles, self.discharge_delay)
| <commit_before>#!/usr/bin/env python
import rcsensor
print(rcsensor.get_count(200, 10, 22))
<commit_msg>Create a RC sensor class object<commit_after>#!/usr/bin/env python
from common.rcsensor import rcsensor as rcsensor
class RcSensor(object):
def __init__(self, gpio, cycles=200, discharge_delay=10):
if gpio is None:
raise ValueError("Must supply gpio value")
self.gpio = gpio
self.cycles = cycles
self.discharge_delay = discharge_delay
def rc_count(self):
"""
Returns the average of cycle number of readings from a GPIO based R/C sensor
:return: int
"""
return rcsensor.get_rc_counts(self.gpio, self.cycles, self.discharge_delay)
|
d697266e41f2e073c801221b7da46455a0ef1116 | dimod/package_info.py | dimod/package_info.py | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.5'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| Update version 0.9.4 -> 0.9.5 | Update version 0.9.4 -> 0.9.5
New Features
------------
* `BQM.normalize` now returns the value the BQM was scaled by
* `SampleSet.relabel_variables` no longer blocks for unresolved sample sets
* `FileView` has a new parameter, `ignore_variables` that treats the BQM as integer-labelled
* `ScaleComposite` no longer blocks
Fixes
-----
* `FileView.seek` now works correctly with `io.SEEK_END`
Changes
-------
* `BinaryPolynomial` support in `ScaleComposite` has been removed | Python | apache-2.0 | dwavesystems/dimod,dwavesystems/dimod | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
Update version 0.9.4 -> 0.9.5
New Features
------------
* `BQM.normalize` now returns the value the BQM was scaled by
* `SampleSet.relabel_variables` no longer blocks for unresolved sample sets
* `FileView` has a new parameter, `ignore_variables` that treats the BQM as integer-labelled
* `ScaleComposite` no longer blocks
Fixes
-----
* `FileView.seek` now works correctly with `io.SEEK_END`
Changes
-------
* `BinaryPolynomial` support in `ScaleComposite` has been removed | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.5'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| <commit_before># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 0.9.4 -> 0.9.5
New Features
------------
* `BQM.normalize` now returns the value the BQM was scaled by
* `SampleSet.relabel_variables` no longer blocks for unresolved sample sets
* `FileView` has a new parameter, `ignore_variables` that treats the BQM as integer-labelled
* `ScaleComposite` no longer blocks
Fixes
-----
* `FileView.seek` now works correctly with `io.SEEK_END`
Changes
-------
* `BinaryPolynomial` support in `ScaleComposite` has been removed<commit_after> | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.5'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
Update version 0.9.4 -> 0.9.5
New Features
------------
* `BQM.normalize` now returns the value the BQM was scaled by
* `SampleSet.relabel_variables` no longer blocks for unresolved sample sets
* `FileView` has a new parameter, `ignore_variables` that treats the BQM as integer-labelled
* `ScaleComposite` no longer blocks
Fixes
-----
* `FileView.seek` now works correctly with `io.SEEK_END`
Changes
-------
* `BinaryPolynomial` support in `ScaleComposite` has been removed# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.5'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
| <commit_before># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.4'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 0.9.4 -> 0.9.5
New Features
------------
* `BQM.normalize` now returns the value the BQM was scaled by
* `SampleSet.relabel_variables` no longer blocks for unresolved sample sets
* `FileView` has a new parameter, `ignore_variables` that treats the BQM as integer-labelled
* `ScaleComposite` no longer blocks
Fixes
-----
* `FileView.seek` now works correctly with `io.SEEK_END`
Changes
-------
* `BinaryPolynomial` support in `ScaleComposite` has been removed<commit_after># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.9.5'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
a5e8f6af93debd98b626ee382a843d5dedbf70f8 | test/benchmarks/general/blocks/read_sigproc.py | test/benchmarks/general/blocks/read_sigproc.py | from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
| """ Test the sigproc read function """
from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
| Add docstrings for sigproc benchmarks | Add docstrings for sigproc benchmarks
| Python | bsd-3-clause | ledatelescope/bifrost,ledatelescope/bifrost,ledatelescope/bifrost,ledatelescope/bifrost | from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
Add docstrings for sigproc benchmarks | """ Test the sigproc read function """
from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
| <commit_before>from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
<commit_msg>Add docstrings for sigproc benchmarks<commit_after> | """ Test the sigproc read function """
from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
| from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
Add docstrings for sigproc benchmarks""" Test the sigproc read function """
from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
| <commit_before>from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
<commit_msg>Add docstrings for sigproc benchmarks<commit_after>""" Test the sigproc read function """
from timeit import default_timer as timer
import bifrost as bf
from bifrost import pipeline as bfp
from bifrost import blocks as blocks
from bifrost_benchmarks import PipelineBenchmarker
class SigprocBenchmarker(PipelineBenchmarker):
""" Test the sigproc read function """
def run_benchmark(self):
with bf.Pipeline() as pipeline:
fil_file = "../../../data/1chan8bitNoDM.fil"
data = blocks.read_sigproc([fil_file], gulp_nframe=4096)
data.on_data = self.timeit(data.on_data)
start = timer()
pipeline.run()
end = timer()
self.total_clock_time = end-start
sigproc_benchmarker = SigprocBenchmarker()
print sigproc_benchmarker.average_benchmark(10)
|
9a2cc99b068b2aaa572f52b4516852b239577c34 | dummyserver/server.py | dummyserver/server.py | #!/usr/bin/python
import threading, socket
"""
Dummy server using for unit testing
"""
class Server(threading.Thread):
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
| #!/usr/bin/python
import threading, socket
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
| Put docstring inside Server class | Put docstring inside Server class
| Python | apache-2.0 | psf/requests | #!/usr/bin/python
import threading, socket
"""
Dummy server using for unit testing
"""
class Server(threading.Thread):
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
Put docstring inside Server class | #!/usr/bin/python
import threading, socket
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
| <commit_before>#!/usr/bin/python
import threading, socket
"""
Dummy server using for unit testing
"""
class Server(threading.Thread):
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
<commit_msg>Put docstring inside Server class<commit_after> | #!/usr/bin/python
import threading, socket
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
| #!/usr/bin/python
import threading, socket
"""
Dummy server using for unit testing
"""
class Server(threading.Thread):
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
Put docstring inside Server class#!/usr/bin/python
import threading, socket
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
| <commit_before>#!/usr/bin/python
import threading, socket
"""
Dummy server using for unit testing
"""
class Server(threading.Thread):
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
<commit_msg>Put docstring inside Server class<commit_after>#!/usr/bin/python
import threading, socket
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
|
47352af38ace09af3572bc63d8c1da4d27cafb86 | app/notify_client/job_api_client.py | app/notify_client/job_api_client.py |
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
def __init__(self, base_url=None, client_id=None, secret=None):
super(self.__class__, self).__init__(base_url=base_url or 'base_url',
client_id=client_id or 'client_id',
secret=secret or 'secret')
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
self.secret = app.config['ADMIN_CLIENT_SECRET']
def get_job(self, service_id, job_id=None):
if job_id:
return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
else:
return self.get(url='/service/{}/job'.format(service_id))
def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
data = {
"id": job_id,
"template": template_id,
"original_file_name": original_file_name,
"notification_count": notification_count
}
_attach_current_user(data)
resp = self.post(url='/service/{}/job'.format(service_id), data=data)
return resp['data']
|
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
def __init__(self, base_url=None, client_id=None, secret=None):
super(self.__class__, self).__init__(base_url=base_url or 'base_url',
client_id=client_id or 'client_id',
secret=secret or 'secret')
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
self.secret = app.config['ADMIN_CLIENT_SECRET']
def get_job(self, service_id, job_id=None, limit_days=None):
if job_id:
return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
params = {}
if limit_days is not None:
params['limit_days'] = limit_days
else:
return self.get(url='/service/{}/job'.format(service_id), params=params)
def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
data = {
"id": job_id,
"template": template_id,
"original_file_name": original_file_name,
"notification_count": notification_count
}
_attach_current_user(data)
resp = self.post(url='/service/{}/job'.format(service_id), data=data)
return resp['data']
| Add limit_days query param to the get_job endpoint. | Add limit_days query param to the get_job endpoint.
| Python | mit | alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin |
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
def __init__(self, base_url=None, client_id=None, secret=None):
super(self.__class__, self).__init__(base_url=base_url or 'base_url',
client_id=client_id or 'client_id',
secret=secret or 'secret')
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
self.secret = app.config['ADMIN_CLIENT_SECRET']
def get_job(self, service_id, job_id=None):
if job_id:
return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
else:
return self.get(url='/service/{}/job'.format(service_id))
def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
data = {
"id": job_id,
"template": template_id,
"original_file_name": original_file_name,
"notification_count": notification_count
}
_attach_current_user(data)
resp = self.post(url='/service/{}/job'.format(service_id), data=data)
return resp['data']
Add limit_days query param to the get_job endpoint. |
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
def __init__(self, base_url=None, client_id=None, secret=None):
super(self.__class__, self).__init__(base_url=base_url or 'base_url',
client_id=client_id or 'client_id',
secret=secret or 'secret')
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
self.secret = app.config['ADMIN_CLIENT_SECRET']
def get_job(self, service_id, job_id=None, limit_days=None):
if job_id:
return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
params = {}
if limit_days is not None:
params['limit_days'] = limit_days
else:
return self.get(url='/service/{}/job'.format(service_id), params=params)
def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
data = {
"id": job_id,
"template": template_id,
"original_file_name": original_file_name,
"notification_count": notification_count
}
_attach_current_user(data)
resp = self.post(url='/service/{}/job'.format(service_id), data=data)
return resp['data']
| <commit_before>
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
def __init__(self, base_url=None, client_id=None, secret=None):
super(self.__class__, self).__init__(base_url=base_url or 'base_url',
client_id=client_id or 'client_id',
secret=secret or 'secret')
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
self.secret = app.config['ADMIN_CLIENT_SECRET']
def get_job(self, service_id, job_id=None):
if job_id:
return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
else:
return self.get(url='/service/{}/job'.format(service_id))
def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
data = {
"id": job_id,
"template": template_id,
"original_file_name": original_file_name,
"notification_count": notification_count
}
_attach_current_user(data)
resp = self.post(url='/service/{}/job'.format(service_id), data=data)
return resp['data']
<commit_msg>Add limit_days query param to the get_job endpoint.<commit_after> |
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
    """Client for the notifications admin API's job endpoints."""
    def __init__(self, base_url=None, client_id=None, secret=None):
        # Placeholder credentials keep BaseAPIClient happy until init_app()
        # installs the real configuration. Name the class explicitly:
        # super(self.__class__, ...) recurses infinitely under subclassing.
        super(JobApiClient, self).__init__(base_url=base_url or 'base_url',
                                           client_id=client_id or 'client_id',
                                           secret=secret or 'secret')
    def init_app(self, app):
        """Install API host and admin credentials from the app config."""
        self.base_url = app.config['API_HOST_NAME']
        self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
        self.secret = app.config['ADMIN_CLIENT_SECRET']
    def get_job(self, service_id, job_id=None, limit_days=None):
        """Fetch one job, or list the service's jobs.

        Bug fix: the listing request hung off an ``else`` of the
        ``limit_days`` check, so supplying ``limit_days`` made the method
        fall through and return ``None``. The listing call now always
        runs, forwarding ``limit_days`` as a query parameter when given.
        """
        if job_id:
            return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
        params = {}
        if limit_days is not None:
            params['limit_days'] = limit_days
        return self.get(url='/service/{}/job'.format(service_id), params=params)
    def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
        """POST a new job for the service and return the created resource."""
        data = {
            "id": job_id,
            "template": template_id,
            "original_file_name": original_file_name,
            "notification_count": notification_count
        }
        # Record which signed-in user initiated the job.
        _attach_current_user(data)
        resp = self.post(url='/service/{}/job'.format(service_id), data=data)
        return resp['data']
|
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
    """Client for the notifications admin API's job endpoints."""
    def __init__(self, base_url=None, client_id=None, secret=None):
        # Placeholder credentials keep BaseAPIClient happy until init_app()
        # installs the real configuration.
        # NOTE(review): super(self.__class__, ...) recurses if this class is
        # ever subclassed — confirm subclassing is not expected.
        super(self.__class__, self).__init__(base_url=base_url or 'base_url',
                                             client_id=client_id or 'client_id',
                                             secret=secret or 'secret')
    def init_app(self, app):
        """Install API host and admin credentials from the app config."""
        self.base_url = app.config['API_HOST_NAME']
        self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
        self.secret = app.config['ADMIN_CLIENT_SECRET']
    def get_job(self, service_id, job_id=None):
        """Fetch one job when job_id is given, else list the service's jobs."""
        if job_id:
            return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
        else:
            return self.get(url='/service/{}/job'.format(service_id))
    def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
        """POST a new job for the service and return the created resource."""
        data = {
            "id": job_id,
            "template": template_id,
            "original_file_name": original_file_name,
            "notification_count": notification_count
        }
        # Record which signed-in user initiated the job.
        _attach_current_user(data)
        resp = self.post(url='/service/{}/job'.format(service_id), data=data)
        return resp['data']
Add limit_days query param to the get_job endpoint.
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
    """Client for the notifications admin API's job endpoints."""
    def __init__(self, base_url=None, client_id=None, secret=None):
        # Placeholder credentials keep BaseAPIClient happy until init_app()
        # installs the real configuration. Name the class explicitly:
        # super(self.__class__, ...) recurses infinitely under subclassing.
        super(JobApiClient, self).__init__(base_url=base_url or 'base_url',
                                           client_id=client_id or 'client_id',
                                           secret=secret or 'secret')
    def init_app(self, app):
        """Install API host and admin credentials from the app config."""
        self.base_url = app.config['API_HOST_NAME']
        self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
        self.secret = app.config['ADMIN_CLIENT_SECRET']
    def get_job(self, service_id, job_id=None, limit_days=None):
        """Fetch one job, or list the service's jobs.

        Bug fix: the listing request hung off an ``else`` of the
        ``limit_days`` check, so supplying ``limit_days`` made the method
        fall through and return ``None``. The listing call now always
        runs, forwarding ``limit_days`` as a query parameter when given.
        """
        if job_id:
            return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
        params = {}
        if limit_days is not None:
            params['limit_days'] = limit_days
        return self.get(url='/service/{}/job'.format(service_id), params=params)
    def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
        """POST a new job for the service and return the created resource."""
        data = {
            "id": job_id,
            "template": template_id,
            "original_file_name": original_file_name,
            "notification_count": notification_count
        }
        # Record which signed-in user initiated the job.
        _attach_current_user(data)
        resp = self.post(url='/service/{}/job'.format(service_id), data=data)
        return resp['data']
| <commit_before>
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
    """Client for the notifications admin API's job endpoints."""
    def __init__(self, base_url=None, client_id=None, secret=None):
        # Placeholder credentials keep BaseAPIClient happy until init_app()
        # installs the real configuration.
        # NOTE(review): super(self.__class__, ...) recurses if this class is
        # ever subclassed — confirm subclassing is not expected.
        super(self.__class__, self).__init__(base_url=base_url or 'base_url',
                                             client_id=client_id or 'client_id',
                                             secret=secret or 'secret')
    def init_app(self, app):
        """Install API host and admin credentials from the app config."""
        self.base_url = app.config['API_HOST_NAME']
        self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
        self.secret = app.config['ADMIN_CLIENT_SECRET']
    def get_job(self, service_id, job_id=None):
        """Fetch one job when job_id is given, else list the service's jobs."""
        if job_id:
            return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
        else:
            return self.get(url='/service/{}/job'.format(service_id))
    def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
        """POST a new job for the service and return the created resource."""
        data = {
            "id": job_id,
            "template": template_id,
            "original_file_name": original_file_name,
            "notification_count": notification_count
        }
        # Record which signed-in user initiated the job.
        _attach_current_user(data)
        resp = self.post(url='/service/{}/job'.format(service_id), data=data)
        return resp['data']
<commit_msg>Add limit_days query param to the get_job endpoint.<commit_after>
from notifications_python_client.base import BaseAPIClient
from app.notify_client import _attach_current_user
class JobApiClient(BaseAPIClient):
    """Client for the notifications admin API's job endpoints."""
    def __init__(self, base_url=None, client_id=None, secret=None):
        # Placeholder credentials keep BaseAPIClient happy until init_app()
        # installs the real configuration. Name the class explicitly:
        # super(self.__class__, ...) recurses infinitely under subclassing.
        super(JobApiClient, self).__init__(base_url=base_url or 'base_url',
                                           client_id=client_id or 'client_id',
                                           secret=secret or 'secret')
    def init_app(self, app):
        """Install API host and admin credentials from the app config."""
        self.base_url = app.config['API_HOST_NAME']
        self.client_id = app.config['ADMIN_CLIENT_USER_NAME']
        self.secret = app.config['ADMIN_CLIENT_SECRET']
    def get_job(self, service_id, job_id=None, limit_days=None):
        """Fetch one job, or list the service's jobs.

        Bug fix: the listing request hung off an ``else`` of the
        ``limit_days`` check, so supplying ``limit_days`` made the method
        fall through and return ``None``. The listing call now always
        runs, forwarding ``limit_days`` as a query parameter when given.
        """
        if job_id:
            return self.get(url='/service/{}/job/{}'.format(service_id, job_id))
        params = {}
        if limit_days is not None:
            params['limit_days'] = limit_days
        return self.get(url='/service/{}/job'.format(service_id), params=params)
    def create_job(self, job_id, service_id, template_id, original_file_name, notification_count):
        """POST a new job for the service and return the created resource."""
        data = {
            "id": job_id,
            "template": template_id,
            "original_file_name": original_file_name,
            "notification_count": notification_count
        }
        # Record which signed-in user initiated the job.
        _attach_current_user(data)
        resp = self.post(url='/service/{}/job'.format(service_id), data=data)
        return resp['data']
|
aa6cf034e41f9426e6b4688ffe832f802efb7864 | fbalerts.py | fbalerts.py | '''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
url = 'https://www.facebook.com/feeds/notifications.php?id=' + \
profile_id + '&viewer=' + profile_id + '&key=' + token + '&format=json'
def get_page(url):
    """Fetch *url* and return the response body, or '' on any failure."""
    try:
        import urllib
        return urllib.urlopen(url).read()
    except:
        # NOTE(review): bare except silently maps every failure (network
        # errors, import problems, keyboard interrupt) to an empty page;
        # consider narrowing to IOError.
        return ''
def main():
    """Print the titles of the newest Facebook notifications."""
    try:
        data = json.loads(get_page(url))
        # Each feed entry's title is the human-readable notification text.
        for i in range(notifications):
            print data['entries'][i]['title'] + "\n"
    except:
        # Covers network failures, bad JSON, and fewer entries than requested.
        print """
    Couldnot fetch the notifications. The possible causes are:
    1. You are not connected to the internet.
    2. You haven't entered the correct api token.
    """
if __name__ == "__main__":
main()
| '''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
base_url = 'https://www.facebook.com/feeds/notifications.php?id={0}&viewer={0}&key={1}&format=json'
url = base_url.format(profile_id, token)
def get_page(url):
    """Fetch *url* and return the response body, or '' on any failure."""
    try:
        import urllib
        return urllib.urlopen(url).read()
    except:
        # NOTE(review): bare except silently maps every failure (network
        # errors, import problems, keyboard interrupt) to an empty page;
        # consider narrowing to IOError.
        return ''
def main():
    """Print the titles of the newest Facebook notifications."""
    try:
        data = json.loads(get_page(url))
        # Each feed entry's title is the human-readable notification text.
        for i in range(notifications):
            print data['entries'][i]['title'] + "\n"
    except:
        # Covers network failures, bad JSON, and fewer entries than requested.
        print """
    Couldnot fetch the notifications. The possible causes are:
    1. You are not connected to the internet.
    2. You haven't entered the correct api token.
    """
if __name__ == "__main__":
main()
| Use new string formatting method | Use new string formatting method | Python | mit | studenton/facebook-alerts | '''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
url = 'https://www.facebook.com/feeds/notifications.php?id=' + \
profile_id + '&viewer=' + profile_id + '&key=' + token + '&format=json'
def get_page(url):
try:
import urllib
return urllib.urlopen(url).read()
except:
return ''
def main():
try:
data = json.loads(get_page(url))
for i in range(notifications):
print data['entries'][i]['title'] + "\n"
except:
print """
Couldnot fetch the notifications. The possible causes are:
1. You are not connected to the internet.
2. You haven't entered the correct api token.
"""
if __name__ == "__main__":
main()
Use new string formatting method | '''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
base_url = 'https://www.facebook.com/feeds/notifications.php?id={0}&viewer={0}&key={1}&format=json'
url = base_url.format(profile_id, token)
def get_page(url):
try:
import urllib
return urllib.urlopen(url).read()
except:
return ''
def main():
try:
data = json.loads(get_page(url))
for i in range(notifications):
print data['entries'][i]['title'] + "\n"
except:
print """
Couldnot fetch the notifications. The possible causes are:
1. You are not connected to the internet.
2. You haven't entered the correct api token.
"""
if __name__ == "__main__":
main()
| <commit_before>'''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
url = 'https://www.facebook.com/feeds/notifications.php?id=' + \
profile_id + '&viewer=' + profile_id + '&key=' + token + '&format=json'
def get_page(url):
try:
import urllib
return urllib.urlopen(url).read()
except:
return ''
def main():
try:
data = json.loads(get_page(url))
for i in range(notifications):
print data['entries'][i]['title'] + "\n"
except:
print """
Couldnot fetch the notifications. The possible causes are:
1. You are not connected to the internet.
2. You haven't entered the correct api token.
"""
if __name__ == "__main__":
main()
<commit_msg>Use new string formatting method<commit_after> | '''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
base_url = 'https://www.facebook.com/feeds/notifications.php?id={0}&viewer={0}&key={1}&format=json'
url = base_url.format(profile_id, token)
def get_page(url):
try:
import urllib
return urllib.urlopen(url).read()
except:
return ''
def main():
try:
data = json.loads(get_page(url))
for i in range(notifications):
print data['entries'][i]['title'] + "\n"
except:
print """
Couldnot fetch the notifications. The possible causes are:
1. You are not connected to the internet.
2. You haven't entered the correct api token.
"""
if __name__ == "__main__":
main()
| '''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
url = 'https://www.facebook.com/feeds/notifications.php?id=' + \
profile_id + '&viewer=' + profile_id + '&key=' + token + '&format=json'
def get_page(url):
try:
import urllib
return urllib.urlopen(url).read()
except:
return ''
def main():
try:
data = json.loads(get_page(url))
for i in range(notifications):
print data['entries'][i]['title'] + "\n"
except:
print """
Couldnot fetch the notifications. The possible causes are:
1. You are not connected to the internet.
2. You haven't entered the correct api token.
"""
if __name__ == "__main__":
main()
Use new string formatting method'''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
base_url = 'https://www.facebook.com/feeds/notifications.php?id={0}&viewer={0}&key={1}&format=json'
url = base_url.format(profile_id, token)
def get_page(url):
try:
import urllib
return urllib.urlopen(url).read()
except:
return ''
def main():
try:
data = json.loads(get_page(url))
for i in range(notifications):
print data['entries'][i]['title'] + "\n"
except:
print """
Couldnot fetch the notifications. The possible causes are:
1. You are not connected to the internet.
2. You haven't entered the correct api token.
"""
if __name__ == "__main__":
main()
| <commit_before>'''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
url = 'https://www.facebook.com/feeds/notifications.php?id=' + \
profile_id + '&viewer=' + profile_id + '&key=' + token + '&format=json'
def get_page(url):
try:
import urllib
return urllib.urlopen(url).read()
except:
return ''
def main():
try:
data = json.loads(get_page(url))
for i in range(notifications):
print data['entries'][i]['title'] + "\n"
except:
print """
Couldnot fetch the notifications. The possible causes are:
1. You are not connected to the internet.
2. You haven't entered the correct api token.
"""
if __name__ == "__main__":
main()
<commit_msg>Use new string formatting method<commit_after>'''
Check your Facebook notifications on command line.
Author: Amit Chaudhary ( studenton.com@gmail.com )
'''
import json
# Configuration
notifications = 5 # Number of Notifications
profile_id = '1XXXXXXXXXXXXXX'
token = 'write token here'
base_url = 'https://www.facebook.com/feeds/notifications.php?id={0}&viewer={0}&key={1}&format=json'
url = base_url.format(profile_id, token)
def get_page(url):
try:
import urllib
return urllib.urlopen(url).read()
except:
return ''
def main():
try:
data = json.loads(get_page(url))
for i in range(notifications):
print data['entries'][i]['title'] + "\n"
except:
print """
Couldnot fetch the notifications. The possible causes are:
1. You are not connected to the internet.
2. You haven't entered the correct api token.
"""
if __name__ == "__main__":
main()
|
afa76e2643ed75c6864d2281afd3e220b848e487 | iscc_bench/textid/unicode_blocks.py | iscc_bench/textid/unicode_blocks.py | # -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
    """Download the Unicode Blocks.txt table and parse it.

    Returns a dict mapping (start, end) codepoint tuples to block names.
    """
    blocks = {}
    for line in requests.get(URL).text.splitlines():
        # Skip comment lines and blanks in the data file.
        if not line or line.startswith('#'):
            continue
        hex_range, name = line.split(';')
        bounds = tuple(int(bound, 16) for bound in hex_range.split('..'))
        blocks[bounds] = name.strip()
    return blocks
if __name__ == '__main__':
b = load_blocks()
pprint(b)
| # -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
import unicodedata
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
    """Load and parse unicode blocks from unicode standard"""
    blocks = {}
    data = requests.get(URL).text
    for line in data.splitlines():
        # Data lines look like "0000..007F; Basic Latin"; skip comments/blanks.
        if line and not line.startswith('#'):
            hex_range, name = line.split(';')
            int_range = tuple(int(i, 16) for i in hex_range.split('..'))
            # Key: (start, end) codepoint tuple; value: block name.
            blocks[int_range] = name.strip()
    return blocks
def codepoints():
    """A list of all Unicode codepoints covered by a named block."""
    cps = []
    # Iterating the blocks dict yields its (start, end) range keys.
    for start, end in load_blocks():
        cps.extend(range(start, end + 1))
    return cps
def whitespace_codes():
    """All codepoints whose Unicode category is Zs (space separators)."""
    return [cp for cp in codepoints()
            if unicodedata.category(chr(cp)) == 'Zs']
def control_codes():
    """All codepoints in the Unicode 'C' (control and other) categories."""
    return [cp for cp in codepoints()
            if unicodedata.category(chr(cp)).startswith("C")]
if __name__ == '__main__':
pprint(load_blocks())
| Add various unicode spec helper functions | Add various unicode spec helper functions
| Python | bsd-2-clause | coblo/isccbench | # -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
blocks = {}
data = requests.get(URL).text
for line in data.splitlines():
if line and not line.startswith('#'):
hex_range, name = line.split(';')
int_range = tuple(int(i, 16) for i in hex_range.split('..'))
blocks[int_range] = name.strip()
return blocks
if __name__ == '__main__':
b = load_blocks()
pprint(b)
Add various unicode spec helper functions | # -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
import unicodedata
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
"""Load and parse unicode blocks from unicode standard"""
blocks = {}
data = requests.get(URL).text
for line in data.splitlines():
if line and not line.startswith('#'):
hex_range, name = line.split(';')
int_range = tuple(int(i, 16) for i in hex_range.split('..'))
blocks[int_range] = name.strip()
return blocks
def codepoints():
"""A list of all Unicode codepoints"""
cps = []
for block in load_blocks():
for cp in range(block[0], block[1] + 1):
cps.append(cp)
return cps
def whitespace_codes():
"""All whitespace character codes"""
ws = []
for cp in codepoints():
if unicodedata.category(chr(cp)) == 'Zs':
ws.append(cp)
return ws
def control_codes():
"""All control character codes"""
cc = []
for cp in codepoints():
if unicodedata.category(chr(cp)).startswith("C"):
cc.append(cp)
return cc
if __name__ == '__main__':
pprint(load_blocks())
| <commit_before># -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
blocks = {}
data = requests.get(URL).text
for line in data.splitlines():
if line and not line.startswith('#'):
hex_range, name = line.split(';')
int_range = tuple(int(i, 16) for i in hex_range.split('..'))
blocks[int_range] = name.strip()
return blocks
if __name__ == '__main__':
b = load_blocks()
pprint(b)
<commit_msg>Add various unicode spec helper functions<commit_after> | # -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
import unicodedata
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
"""Load and parse unicode blocks from unicode standard"""
blocks = {}
data = requests.get(URL).text
for line in data.splitlines():
if line and not line.startswith('#'):
hex_range, name = line.split(';')
int_range = tuple(int(i, 16) for i in hex_range.split('..'))
blocks[int_range] = name.strip()
return blocks
def codepoints():
"""A list of all Unicode codepoints"""
cps = []
for block in load_blocks():
for cp in range(block[0], block[1] + 1):
cps.append(cp)
return cps
def whitespace_codes():
"""All whitespace character codes"""
ws = []
for cp in codepoints():
if unicodedata.category(chr(cp)) == 'Zs':
ws.append(cp)
return ws
def control_codes():
"""All control character codes"""
cc = []
for cp in codepoints():
if unicodedata.category(chr(cp)).startswith("C"):
cc.append(cp)
return cc
if __name__ == '__main__':
pprint(load_blocks())
| # -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
blocks = {}
data = requests.get(URL).text
for line in data.splitlines():
if line and not line.startswith('#'):
hex_range, name = line.split(';')
int_range = tuple(int(i, 16) for i in hex_range.split('..'))
blocks[int_range] = name.strip()
return blocks
if __name__ == '__main__':
b = load_blocks()
pprint(b)
Add various unicode spec helper functions# -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
import unicodedata
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
"""Load and parse unicode blocks from unicode standard"""
blocks = {}
data = requests.get(URL).text
for line in data.splitlines():
if line and not line.startswith('#'):
hex_range, name = line.split(';')
int_range = tuple(int(i, 16) for i in hex_range.split('..'))
blocks[int_range] = name.strip()
return blocks
def codepoints():
"""A list of all Unicode codepoints"""
cps = []
for block in load_blocks():
for cp in range(block[0], block[1] + 1):
cps.append(cp)
return cps
def whitespace_codes():
"""All whitespace character codes"""
ws = []
for cp in codepoints():
if unicodedata.category(chr(cp)) == 'Zs':
ws.append(cp)
return ws
def control_codes():
"""All control character codes"""
cc = []
for cp in codepoints():
if unicodedata.category(chr(cp)).startswith("C"):
cc.append(cp)
return cc
if __name__ == '__main__':
pprint(load_blocks())
| <commit_before># -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
blocks = {}
data = requests.get(URL).text
for line in data.splitlines():
if line and not line.startswith('#'):
hex_range, name = line.split(';')
int_range = tuple(int(i, 16) for i in hex_range.split('..'))
blocks[int_range] = name.strip()
return blocks
if __name__ == '__main__':
b = load_blocks()
pprint(b)
<commit_msg>Add various unicode spec helper functions<commit_after># -*- coding: utf-8 -*-
"""Blocks of unicode ranges"""
import unicodedata
from pprint import pprint
import requests
URL = "https://www.unicode.org/Public/UCD/latest/ucd/Blocks.txt"
def load_blocks():
"""Load and parse unicode blocks from unicode standard"""
blocks = {}
data = requests.get(URL).text
for line in data.splitlines():
if line and not line.startswith('#'):
hex_range, name = line.split(';')
int_range = tuple(int(i, 16) for i in hex_range.split('..'))
blocks[int_range] = name.strip()
return blocks
def codepoints():
"""A list of all Unicode codepoints"""
cps = []
for block in load_blocks():
for cp in range(block[0], block[1] + 1):
cps.append(cp)
return cps
def whitespace_codes():
"""All whitespace character codes"""
ws = []
for cp in codepoints():
if unicodedata.category(chr(cp)) == 'Zs':
ws.append(cp)
return ws
def control_codes():
"""All control character codes"""
cc = []
for cp in codepoints():
if unicodedata.category(chr(cp)).startswith("C"):
cc.append(cp)
return cc
if __name__ == '__main__':
pprint(load_blocks())
|
0f5a632d625d65f4edf9e31efa75708a79eee16c | CaseStudies/glass/Implementations/Python_Simplified/Implementation/readTable.py | CaseStudies/glass/Implementations/Python_Simplified/Implementation/readTable.py | """
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
import numpy as np
def read_num_col(filename):
    """Return the z-values (every second field of the header row) as floats."""
    with open(filename, 'rb') as f:
        header = [f.readline()]
    fields = np.genfromtxt(header, delimiter=',', dtype=str)
    # Values sit at the odd-numbered positions, between the labels.
    return fields[1::2].astype(float)
def read_array1(filename, length):
    """Load the x-columns (even-numbered fields) of the table body."""
    even_cols = range(0, 2 * length, 2)
    return np.loadtxt(filename, delimiter=',', usecols=even_cols, skiprows=1)
def read_array2(filename, length):
    """Load the y-columns (odd-numbered fields) of the table body."""
    odd_cols = range(1, 2 * length, 2)
    return np.loadtxt(filename, delimiter=',', usecols=odd_cols, skiprows=1)
| """
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
def read_num_col(filename):
    """Return the z-values (every second field of the header row) as floats."""
    with open(filename, "r") as f:
        header = f.readline()
    return [float(tok) for tok in header.split(",")[1::2]]
def read_array1(filename, length):
    """Read the x-columns (even-numbered CSV fields) of the table body.

    The header row is skipped; ``length`` is unused but retained for
    interface compatibility with the other table readers.
    """
    with open(filename, "r") as f:
        body = f.readlines()[1:]
    return [[float(tok) for tok in row.split(",")[0::2]] for row in body]
def read_array2(filename, length):
    """Read the y-columns (odd-numbered CSV fields) of the table body.

    The header row is skipped; ``length`` is unused but retained for
    interface compatibility with the other table readers.
    """
    with open(filename, "r") as f:
        body = f.readlines()[1:]
    return [[float(tok) for tok in row.split(",")[1::2]] for row in body]
| Remove numpy dependency from glassbr python code | Remove numpy dependency from glassbr python code
| Python | bsd-2-clause | JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software,JacquesCarette/literate-scientific-software | """
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
import numpy as np
def read_num_col(filename):
with open(filename, 'rb') as f:
num_col = [f.readline()]
num_col = np.genfromtxt(num_col, delimiter=',', dtype=str)
num_col = num_col[1::2].astype(float)
return num_col
def read_array1(filename, length):
array1 = np.loadtxt(filename, delimiter=',', usecols=range(0, 2*length, 2), skiprows=1)
return array1
def read_array2(filename, length):
array2 = np.loadtxt(filename, delimiter=',', usecols=range(1, 2*length, 2), skiprows=1)
return array2
Remove numpy dependency from glassbr python code | """
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
def read_num_col(filename):
with open(filename, "r") as f:
line = f.readline()
z_array = line.split(",")[1::2]
z_array = [float(i) for i in z_array]
return z_array
def read_array1(filename, length):
with open(filename, "r") as f:
lines = f.readlines()
lines = lines[1:]
x_array = [line.split(",")[0::2] for line in lines]
for i in range(len(x_array)):
x_array[i] = [float(j) for j in x_array[i]]
return x_array
def read_array2(filename, length):
with open(filename, "r") as f:
lines = f.readlines()
lines = lines[1:]
y_array = [line.split(",")[1::2] for line in lines]
for i in range(len(y_array)):
y_array[i] = [float(j) for j in y_array[i]]
return y_array
| <commit_before>"""
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
import numpy as np
def read_num_col(filename):
with open(filename, 'rb') as f:
num_col = [f.readline()]
num_col = np.genfromtxt(num_col, delimiter=',', dtype=str)
num_col = num_col[1::2].astype(float)
return num_col
def read_array1(filename, length):
array1 = np.loadtxt(filename, delimiter=',', usecols=range(0, 2*length, 2), skiprows=1)
return array1
def read_array2(filename, length):
array2 = np.loadtxt(filename, delimiter=',', usecols=range(1, 2*length, 2), skiprows=1)
return array2
<commit_msg>Remove numpy dependency from glassbr python code<commit_after> | """
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
def read_num_col(filename):
with open(filename, "r") as f:
line = f.readline()
z_array = line.split(",")[1::2]
z_array = [float(i) for i in z_array]
return z_array
def read_array1(filename, length):
with open(filename, "r") as f:
lines = f.readlines()
lines = lines[1:]
x_array = [line.split(",")[0::2] for line in lines]
for i in range(len(x_array)):
x_array[i] = [float(j) for j in x_array[i]]
return x_array
def read_array2(filename, length):
with open(filename, "r") as f:
lines = f.readlines()
lines = lines[1:]
y_array = [line.split(",")[1::2] for line in lines]
for i in range(len(y_array)):
y_array[i] = [float(j) for j in y_array[i]]
return y_array
| """
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
import numpy as np
def read_num_col(filename):
with open(filename, 'rb') as f:
num_col = [f.readline()]
num_col = np.genfromtxt(num_col, delimiter=',', dtype=str)
num_col = num_col[1::2].astype(float)
return num_col
def read_array1(filename, length):
array1 = np.loadtxt(filename, delimiter=',', usecols=range(0, 2*length, 2), skiprows=1)
return array1
def read_array2(filename, length):
array2 = np.loadtxt(filename, delimiter=',', usecols=range(1, 2*length, 2), skiprows=1)
return array2
Remove numpy dependency from glassbr python code"""
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
def read_num_col(filename):
with open(filename, "r") as f:
line = f.readline()
z_array = line.split(",")[1::2]
z_array = [float(i) for i in z_array]
return z_array
def read_array1(filename, length):
with open(filename, "r") as f:
lines = f.readlines()
lines = lines[1:]
x_array = [line.split(",")[0::2] for line in lines]
for i in range(len(x_array)):
x_array[i] = [float(j) for j in x_array[i]]
return x_array
def read_array2(filename, length):
with open(filename, "r") as f:
lines = f.readlines()
lines = lines[1:]
y_array = [line.split(",")[1::2] for line in lines]
for i in range(len(y_array)):
y_array[i] = [float(j) for j in y_array[i]]
return y_array
| <commit_before>"""
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
import numpy as np
def read_num_col(filename):
with open(filename, 'rb') as f:
num_col = [f.readline()]
num_col = np.genfromtxt(num_col, delimiter=',', dtype=str)
num_col = num_col[1::2].astype(float)
return num_col
def read_array1(filename, length):
array1 = np.loadtxt(filename, delimiter=',', usecols=range(0, 2*length, 2), skiprows=1)
return array1
def read_array2(filename, length):
array2 = np.loadtxt(filename, delimiter=',', usecols=range(1, 2*length, 2), skiprows=1)
return array2
<commit_msg>Remove numpy dependency from glassbr python code<commit_after>"""
This module implements a portion of the Input Format Module. In this
case the input is the tabular data necessary for the different interpolations.
"""
def read_num_col(filename):
with open(filename, "r") as f:
line = f.readline()
z_array = line.split(",")[1::2]
z_array = [float(i) for i in z_array]
return z_array
def read_array1(filename, length):
with open(filename, "r") as f:
lines = f.readlines()
lines = lines[1:]
x_array = [line.split(",")[0::2] for line in lines]
for i in range(len(x_array)):
x_array[i] = [float(j) for j in x_array[i]]
return x_array
def read_array2(filename, length):
with open(filename, "r") as f:
lines = f.readlines()
lines = lines[1:]
y_array = [line.split(",")[1::2] for line in lines]
for i in range(len(y_array)):
y_array[i] = [float(j) for j in y_array[i]]
return y_array
|
ea6c57de01f420bdd344194e5529a0e91036c634 | greenfan/management/commands/create-job-from-testspec.py | greenfan/management/commands/create-job-from-testspec.py | #
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
    """Management command: create a job from a stored test specification."""
    def handle(self, ts_id, **options):
        # Look up the spec by primary key and materialise a job from it.
        ts = TestSpecification.objects.get(id=ts_id)
        job = ts.create_job()
        return 'Created job %d' % job.pk
| #
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
physical = 'physical' in options
job = ts.create_job(physical=physical)
return 'Created job %d' % job.pk
| Allow us to create both virtual and physical jobs | Allow us to create both virtual and physical jobs
| Python | apache-2.0 | sorenh/python-django-greenfan,sorenh/python-django-greenfan | #
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
job = ts.create_job()
return 'Created job %d' % job.pk
Allow us to create both virtual and physical jobs | #
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
physical = 'physical' in options
job = ts.create_job(physical=physical)
return 'Created job %d' % job.pk
| <commit_before>#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
job = ts.create_job()
return 'Created job %d' % job.pk
<commit_msg>Allow us to create both virtual and physical jobs<commit_after> | #
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
physical = 'physical' in options
job = ts.create_job(physical=physical)
return 'Created job %d' % job.pk
| #
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
job = ts.create_job()
return 'Created job %d' % job.pk
Allow us to create both virtual and physical jobs#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
physical = 'physical' in options
job = ts.create_job(physical=physical)
return 'Created job %d' % job.pk
| <commit_before>#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
job = ts.create_job()
return 'Created job %d' % job.pk
<commit_msg>Allow us to create both virtual and physical jobs<commit_after>#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from greenfan.models import TestSpecification
class Command(BaseCommand):
def handle(self, ts_id, **options):
ts = TestSpecification.objects.get(id=ts_id)
physical = 'physical' in options
job = ts.create_job(physical=physical)
return 'Created job %d' % job.pk
|
bfc94287cc5886495851733a45872a8979900435 | lily/notes/migrations/0010_remove_polymorphic_cleanup.py | lily/notes/migrations/0010_remove_polymorphic_cleanup.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
# Comment for testing migrations in Travis continuous deployment.
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
| Test migrations with continuous deployment. | Test migrations with continuous deployment.
| Python | agpl-3.0 | HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
Test migrations with continuous deployment. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
# Comment for testing migrations in Travis continuous deployment.
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
<commit_msg>Test migrations with continuous deployment.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
# Comment for testing migrations in Travis continuous deployment.
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
Test migrations with continuous deployment.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
# Comment for testing migrations in Travis continuous deployment.
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
<commit_msg>Test migrations with continuous deployment.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django_extensions.db.fields
# Comment for testing migrations in Travis continuous deployment.
class Migration(migrations.Migration):
dependencies = [
('notes', '0009_remove_polymorphic_data_migrate'),
]
operations = [
migrations.AlterField(
model_name='NewNote',
name='created',
field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')
),
migrations.AlterField(
model_name='NewNote',
name='modified',
field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')
),
migrations.DeleteModel('Note'),
migrations.RenameModel('NewNote', 'Note')
]
|
bc3e31838fd1b5eec3c4ca17f5fab4588ac87904 | tests/client/test_TelnetClient.py | tests/client/test_TelnetClient.py | import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect(self, socket_mock):
def callback(raw_message):
pass
client = TelnetClient()
client.run(callback=callback)
| import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect_disconnect(self, socket_mock):
client = TelnetClient()
client.connect()
client.sock.connect.assert_called_once()
client.disconnect()
client.sock.shutdown.assert_called_once()
client.sock.close.assert_called_once()
@mock.patch('ogn.client.client.socket')
def test_run(self, socket_mock):
def callback(raw_message):
raise ConnectionRefusedError
client = TelnetClient()
client.connect()
client.run(callback=callback)
| Update to receiver version 0.2.6 | Update to receiver version 0.2.6
Update to receiver version 0.2.6
Better testing
| Python | agpl-3.0 | glidernet/python-ogn-client | import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect(self, socket_mock):
def callback(raw_message):
pass
client = TelnetClient()
client.run(callback=callback)
Update to receiver version 0.2.6
Update to receiver version 0.2.6
Better testing | import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect_disconnect(self, socket_mock):
client = TelnetClient()
client.connect()
client.sock.connect.assert_called_once()
client.disconnect()
client.sock.shutdown.assert_called_once()
client.sock.close.assert_called_once()
@mock.patch('ogn.client.client.socket')
def test_run(self, socket_mock):
def callback(raw_message):
raise ConnectionRefusedError
client = TelnetClient()
client.connect()
client.run(callback=callback)
| <commit_before>import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect(self, socket_mock):
def callback(raw_message):
pass
client = TelnetClient()
client.run(callback=callback)
<commit_msg>Update to receiver version 0.2.6
Update to receiver version 0.2.6
Better testing<commit_after> | import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect_disconnect(self, socket_mock):
client = TelnetClient()
client.connect()
client.sock.connect.assert_called_once()
client.disconnect()
client.sock.shutdown.assert_called_once()
client.sock.close.assert_called_once()
@mock.patch('ogn.client.client.socket')
def test_run(self, socket_mock):
def callback(raw_message):
raise ConnectionRefusedError
client = TelnetClient()
client.connect()
client.run(callback=callback)
| import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect(self, socket_mock):
def callback(raw_message):
pass
client = TelnetClient()
client.run(callback=callback)
Update to receiver version 0.2.6
Update to receiver version 0.2.6
Better testingimport unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect_disconnect(self, socket_mock):
client = TelnetClient()
client.connect()
client.sock.connect.assert_called_once()
client.disconnect()
client.sock.shutdown.assert_called_once()
client.sock.close.assert_called_once()
@mock.patch('ogn.client.client.socket')
def test_run(self, socket_mock):
def callback(raw_message):
raise ConnectionRefusedError
client = TelnetClient()
client.connect()
client.run(callback=callback)
| <commit_before>import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect(self, socket_mock):
def callback(raw_message):
pass
client = TelnetClient()
client.run(callback=callback)
<commit_msg>Update to receiver version 0.2.6
Update to receiver version 0.2.6
Better testing<commit_after>import unittest
import unittest.mock as mock
from ogn.client.client import TelnetClient
class TelnetClientTest(unittest.TestCase):
@mock.patch('ogn.client.client.socket')
def test_connect_disconnect(self, socket_mock):
client = TelnetClient()
client.connect()
client.sock.connect.assert_called_once()
client.disconnect()
client.sock.shutdown.assert_called_once()
client.sock.close.assert_called_once()
@mock.patch('ogn.client.client.socket')
def test_run(self, socket_mock):
def callback(raw_message):
raise ConnectionRefusedError
client = TelnetClient()
client.connect()
client.run(callback=callback)
|
05f0969ee8b9374c2fe5bce2c753fb4619432f0d | tests/integration/runners/jobs.py | tests/integration/runners/jobs.py | # -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], ['{}'])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
| # -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
| Fix the output now that we are using the default output (nested) instead of hard coding it to yaml | Fix the output now that we are using the default output (nested) instead of hard coding it to yaml
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | # -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], ['{}'])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
Fix the output now that we are using the default output (nested) instead of hard coding it to yaml | # -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
| <commit_before># -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], ['{}'])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
<commit_msg>Fix the output now that we are using the default output (nested) instead of hard coding it to yaml<commit_after> | # -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
| # -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], ['{}'])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
Fix the output now that we are using the default output (nested) instead of hard coding it to yaml# -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
| <commit_before># -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], ['{}'])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
<commit_msg>Fix the output now that we are using the default output (nested) instead of hard coding it to yaml<commit_after># -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import salt libs
import integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_active(self):
'''
jobs.active
'''
ret = self.run_run_plus('jobs.active')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_lookup_jid(self):
'''
jobs.lookup_jid
'''
ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
self.assertEqual(ret['fun'], {})
self.assertEqual(ret['out'], [])
def test_list_jobs(self):
'''
jobs.list_jobs
'''
ret = self.run_run_plus('jobs.list_jobs')
self.assertIsInstance(ret['fun'], dict)
if __name__ == '__main__':
from integration import run_tests
run_tests(ManageTest)
|
ba6f29106ba6b8957d82cf042753e4b48a671da6 | waterbutler/providers/osfstorage/metadata.py | waterbutler/providers/osfstorage/metadata.py | from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
| from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
@property
def extra(self):
return {
'user': self.raw['user'],
'downloads': self.raw['downloads'],
}
| Return User and download count | Return User and download count
| Python | apache-2.0 | rdhyee/waterbutler,hmoco/waterbutler,CenterForOpenScience/waterbutler,Johnetordoff/waterbutler,TomBaxter/waterbutler,kwierman/waterbutler,RCOSDP/waterbutler,Ghalko/waterbutler,icereval/waterbutler,cosenal/waterbutler,felliott/waterbutler,chrisseto/waterbutler,rafaeldelucena/waterbutler | from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
Return User and download count | from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
@property
def extra(self):
return {
'user': self.raw['user'],
'downloads': self.raw['downloads'],
}
| <commit_before>from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
<commit_msg>Return User and download count<commit_after> | from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
@property
def extra(self):
return {
'user': self.raw['user'],
'downloads': self.raw['downloads'],
}
| from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
Return User and download countfrom waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
@property
def extra(self):
return {
'user': self.raw['user'],
'downloads': self.raw['downloads'],
}
| <commit_before>from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
<commit_msg>Return User and download count<commit_after>from waterbutler.core import metadata
class BaseOsfStorageMetadata:
@property
def provider(self):
return 'osfstorage'
class OsfStorageFileMetadata(BaseOsfStorageMetadata, metadata.BaseFileMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
if self.raw['path'][0].startswith('/'):
return self.raw['path']
return '/' + self.raw['path']
@property
def modified(self):
return self.raw.get('modified')
@property
def size(self):
return self.raw.get('size')
@property
def content_type(self):
return None
@property
def extra(self):
return {
'version': self.raw['version'],
'downloads': self.raw['downloads'],
}
class OsfStorageFolderMetadata(BaseOsfStorageMetadata, metadata.BaseFolderMetadata):
@property
def name(self):
return self.raw['name']
@property
def path(self):
return self.raw['path']
class OsfStorageRevisionMetadata(BaseOsfStorageMetadata, metadata.BaseFileRevisionMetadata):
@property
def modified(self):
return self.raw['date']
@property
def version_identifier(self):
return 'version'
@property
def version(self):
return str(self.raw['index'])
@property
def extra(self):
return {
'user': self.raw['user'],
'downloads': self.raw['downloads'],
}
|
be86fc3f3c7ec9dc213f8f527da59d5578be8b2a | irma/fileobject/handler.py | irma/fileobject/handler.py | from irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
| from irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
def delete(self):
db = NoSQLDatabase(self._dbname, self._uri)
db.remove(self._dbname, self._collection, self._id)
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
| Delete method added in FileObject | Delete method added in FileObject
| Python | apache-2.0 | hirokihamasaki/irma,hirokihamasaki/irma,quarkslab/irma,hirokihamasaki/irma,hirokihamasaki/irma,quarkslab/irma,hirokihamasaki/irma,quarkslab/irma,quarkslab/irma | from irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
Delete method added in FileObject | from irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
def delete(self):
db = NoSQLDatabase(self._dbname, self._uri)
db.remove(self._dbname, self._collection, self._id)
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
| <commit_before>from irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
<commit_msg>Delete method added in FileObject<commit_after> | from irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
def delete(self):
db = NoSQLDatabase(self._dbname, self._uri)
db.remove(self._dbname, self._collection, self._id)
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
| from irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
Delete method added in FileObjectfrom irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
def delete(self):
db = NoSQLDatabase(self._dbname, self._uri)
db.remove(self._dbname, self._collection, self._id)
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
| <commit_before>from irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
<commit_msg>Delete method added in FileObject<commit_after>from irma.database.nosqlhandler import NoSQLDatabase
from bson import ObjectId
class FileObject(object):
_uri = None
_dbname = None
_collection = None
def __init__(self, dbname=None, id=None):
if dbname:
self._dbname = dbname
self._dbfile = None
if id:
self._id = ObjectId(id)
self.load()
def _exists(self, hashvalue):
db = NoSQLDatabase(self._dbname, self._uri)
return db.exists_file(self._dbname, self._collection, hashvalue)
def load(self):
db = NoSQLDatabase(self._dbname, self._uri)
self._dbfile = db.get_file(self._dbname, self._collection, self._id)
def save(self, data, name):
db = NoSQLDatabase(self._dbname, self._uri)
self._id = db.put_file(self._dbname, self._collection, data, name, '', [])
def delete(self):
db = NoSQLDatabase(self._dbname, self._uri)
db.remove(self._dbname, self._collection, self._id)
@property
def name(self):
"""Get the filename"""
return self._dbfile.filename
@property
def length(self):
"""Get file length"""
return self._dbfile_length
@property
def data(self):
"""Get the data"""
return self._dbfile.read()
@property
def id(self):
"""Return str version of ObjectId"""
if not self._id:
return None
else:
return str(self._id)
|
010c87de588009371adbab8a234de78d9da4ebbd | fullcalendar/admin.py | fullcalendar/admin.py | from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category', 'status')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| Fix Django system check error for a editable field that is not displayed | Fix Django system check error for a editable field that is not displayed
This error,
"CommandError: System check identified some issues:
ERRORS:
<class 'fullcalendar.admin.EventAdmin'>: (admin.E122) The value of 'list_editable[0]' refers to 'status', which is not an attribute of 'events.Event'."
was given for Django >1.7.1
| Python | mit | jonge-democraten/mezzanine-fullcalendar | from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
Fix Django system check error for a editable field that is not displayed
This error,
"CommandError: System check identified some issues:
ERRORS:
<class 'fullcalendar.admin.EventAdmin'>: (admin.E122) The value of 'list_editable[0]' refers to 'status', which is not an attribute of 'events.Event'."
was given for Django >1.7.1 | from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category', 'status')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| <commit_before>from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
<commit_msg>Fix Django system check error for a editable field that is not displayed
This error,
"CommandError: System check identified some issues:
ERRORS:
<class 'fullcalendar.admin.EventAdmin'>: (admin.E122) The value of 'list_editable[0]' refers to 'status', which is not an attribute of 'events.Event'."
was given for Django >1.7.1<commit_after> | from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category', 'status')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
Fix Django system check error for a editable field that is not displayed
This error,
"CommandError: System check identified some issues:
ERRORS:
<class 'fullcalendar.admin.EventAdmin'>: (admin.E122) The value of 'list_editable[0]' refers to 'status', which is not an attribute of 'events.Event'."
was given for Django >1.7.1from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category', 'status')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
| <commit_before>from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
<commit_msg>Fix Django system check error for a editable field that is not displayed
This error,
"CommandError: System check identified some issues:
ERRORS:
<class 'fullcalendar.admin.EventAdmin'>: (admin.E122) The value of 'list_editable[0]' refers to 'status', which is not an attribute of 'events.Event'."
was given for Django >1.7.1<commit_after>from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from mezzanine.core.admin import StackedDynamicInlineAdmin, DisplayableAdmin
from fullcalendar.models import *
class EventCategoryAdmin(admin.ModelAdmin):
list_display = ('name',)
class OccurrenceInline(StackedDynamicInlineAdmin):
model = Occurrence
fields = ('start_time', 'end_time', 'description', 'location')
class EventAdmin(DisplayableAdmin):
list_display = ('title', 'event_category', 'status')
list_filter = ('event_category',)
search_fields = ('title', 'description', 'content', 'keywords')
fieldsets = (
(None, {
"fields": [
"title", "status", ("publish_date", "expiry_date"),
"event_category", "content"
]
}),
(_("Meta data"), {
"fields": [
"_meta_title", "slug",
("description", "gen_description"),
"keywords", "in_sitemap"
],
"classes": ("collapse-closed",)
}),
)
inlines = [OccurrenceInline]
admin.site.register(Event, EventAdmin)
admin.site.register(EventCategory, EventCategoryAdmin)
|
4a85ecaaae1452e74acc485d032f00e8bedace47 | cmsplugin_filer_link/cms_plugins.py | cmsplugin_filer_link/cms_plugins.py | from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
| from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
| Add "page_link" to "raw_id_fields" to prevent the run of "decompress" | Add "page_link" to "raw_id_fields" to prevent the run of "decompress"
Same issue as the already merged pull request for issue #106 however this applies to cmsfiler_link | Python | bsd-3-clause | stefanfoulis/cmsplugin-filer,creimers/cmsplugin-filer,wlanslovenija/cmsplugin-filer,jschneier/cmsplugin-filer,creimers/cmsplugin-filer,divio/cmsplugin-filer,yvess/cmsplugin-filer,brightinteractive/cmsplugin-filer,yvess/cmsplugin-filer,nephila/cmsplugin-filer,jschneier/cmsplugin-filer,stefanfoulis/cmsplugin-filer,sephii/cmsplugin-filer,alsoicode/cmsplugin-filer,yvess/cmsplugin-filer,skirsdeda/cmsplugin-filer,douwevandermeij/cmsplugin-filer,skirsdeda/cmsplugin-filer,sephii/cmsplugin-filer,brightinteractive/cmsplugin-filer,stefanfoulis/cmsplugin-filer,nephila/cmsplugin-filer,douwevandermeij/cmsplugin-filer,jschneier/cmsplugin-filer,wlanslovenija/cmsplugin-filer,douwevandermeij/cmsplugin-filer,divio/cmsplugin-filer,stefanfoulis/cmsplugin-filer,creimers/cmsplugin-filer,divio/cmsplugin-filer,skirsdeda/cmsplugin-filer,nephila/cmsplugin-filer,yvess/cmsplugin-filer,divio/cmsplugin-filer,sephii/cmsplugin-filer,alsoicode/cmsplugin-filer,brightinteractive/cmsplugin-filer,wlanslovenija/cmsplugin-filer,alsoicode/cmsplugin-filer | from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
Add "page_link" to "raw_id_fields" to prevent the run of "decompress"
Same issue as the already merged pull request for issue #106 however this applies to cmsfiler_link | from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
| <commit_before>from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
<commit_msg>Add "page_link" to "raw_id_fields" to prevent the run of "decompress"
Same issue as the already merged pull request for issue #106 however this applies to cmsfiler_link<commit_after> | from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
| from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
Add "page_link" to "raw_id_fields" to prevent the run of "decompress"
Same issue as the already merged pull request for issue #106 however this applies to cmsfiler_linkfrom __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
| <commit_before>from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
<commit_msg>Add "page_link" to "raw_id_fields" to prevent the run of "decompress"
Same issue as the already merged pull request for issue #106 however this applies to cmsfiler_link<commit_after>from __future__ import unicode_literals
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from django.conf import settings
from .models import FilerLinkPlugin
class FilerLinkPlugin(CMSPluginBase):
module = 'Filer'
model = FilerLinkPlugin
name = _("Link")
text_enabled = True
raw_id_fields = ('page_link', )
render_template = "cmsplugin_filer_link/link.html"
def render(self, context, instance, placeholder):
if instance.file:
link = instance.file.url
elif instance.mailto:
link = "mailto:%s" % _(instance.mailto)
elif instance.url:
link = _(instance.url)
elif instance.page_link:
link = instance.page_link.get_absolute_url()
else:
link = ""
context.update({
'link': link,
'style': instance.link_style,
'name': instance.name,
'new_window': instance.new_window,
})
return context
def icon_src(self, instance):
return settings.STATIC_URL + "cms/images/plugins/link.png"
plugin_pool.register_plugin(FilerLinkPlugin)
|
c25b7820ccd52b943586af42d09ce53c3633ed96 | cmsplugin_simple_markdown/models.py | cmsplugin_simple_markdown/models.py | import threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
return self.markdown_text
| import threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
"""
:rtype: str or unicode
"""
return self.markdown_text
| Add some tiny docstring to the unicode method | Add some tiny docstring to the unicode method
| Python | bsd-3-clause | Alir3z4/cmsplugin-simple-markdown,Alir3z4/cmsplugin-simple-markdown | import threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
return self.markdown_text
Add some tiny docstring to the unicode method | import threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
"""
:rtype: str or unicode
"""
return self.markdown_text
| <commit_before>import threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
return self.markdown_text
<commit_msg>Add some tiny docstring to the unicode method<commit_after> | import threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
"""
:rtype: str or unicode
"""
return self.markdown_text
| import threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
return self.markdown_text
Add some tiny docstring to the unicode methodimport threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
"""
:rtype: str or unicode
"""
return self.markdown_text
| <commit_before>import threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
return self.markdown_text
<commit_msg>Add some tiny docstring to the unicode method<commit_after>import threading
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from cmsplugin_simple_markdown import utils
localdata = threading.local()
localdata.TEMPLATE_CHOICES = utils.autodiscover_templates()
TEMPLATE_CHOICES = localdata.TEMPLATE_CHOICES
class SimpleMarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(verbose_name=_('text'))
template = models.CharField(
verbose_name=_('template'),
choices=TEMPLATE_CHOICES,
max_length=255,
default='cmsplugin_simple_markdown/simple_markdown.html',
editable=len(TEMPLATE_CHOICES) > 1
)
def __unicode__(self):
"""
:rtype: str or unicode
"""
return self.markdown_text
|
79ac1550b5acd407b2a107e694c66cccfbc0be89 | alerts/lib/deadman_alerttask.py | alerts/lib/deadman_alerttask.py | from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def __init__(self):
self.deadman = True
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
| from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
| Remove deadman alerttask init method | Remove deadman alerttask init method
| Python | mpl-2.0 | jeffbryner/MozDef,gdestuynder/MozDef,mozilla/MozDef,mpurzynski/MozDef,mozilla/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,gdestuynder/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,mpurzynski/MozDef,gdestuynder/MozDef,Phrozyn/MozDef,mozilla/MozDef,jeffbryner/MozDef,mozilla/MozDef,gdestuynder/MozDef | from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def __init__(self):
self.deadman = True
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
Remove deadman alerttask init method | from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
| <commit_before>from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def __init__(self):
self.deadman = True
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
<commit_msg>Remove deadman alerttask init method<commit_after> | from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
| from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def __init__(self):
self.deadman = True
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
Remove deadman alerttask init methodfrom alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
| <commit_before>from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def __init__(self):
self.deadman = True
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
<commit_msg>Remove deadman alerttask init method<commit_after>from alerttask import AlertTask
class DeadmanAlertTask(AlertTask):
def executeSearchEventsSimple(self):
# We override this method to specify the size as 1
# since we only care about if ANY events are found or not
return self.main_query.execute(self.es, indices=self.event_indices, size=1)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.